Author: olga
Date: Thu Apr 30 22:44:03 2009
New Revision: 770478

URL: http://svn.apache.org/viewvc?rev=770478&view=rev
Log:
merge from trunk

Added:
    hadoop/pig/branches/multiquery/src/org/apache/pig/impl/logicalLayer/schema/SchemaUtil.java
    hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestSchemaUtil.java
Modified:
    hadoop/pig/branches/multiquery/   (props changed)
    hadoop/pig/branches/multiquery/CHANGES.txt
    hadoop/pig/branches/multiquery/build.xml
    hadoop/pig/branches/multiquery/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/PORelationToExprProject.java
    hadoop/pig/branches/multiquery/src/org/apache/pig/data/DefaultAbstractBag.java
    hadoop/pig/branches/multiquery/src/org/apache/pig/data/NonSpillableDataBag.java
    hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestDataBag.java
    hadoop/pig/branches/multiquery/test/org/apache/pig/test/utils/dotGraph/DOTParser.jjt   (props changed)

Propchange: hadoop/pig/branches/multiquery/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Apr 30 22:44:03 2009
@@ -1 +1 @@
-/hadoop/pig/trunk:741728-767341
+/hadoop/pig/trunk:741728-767341,767975-770110

Modified: hadoop/pig/branches/multiquery/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/CHANGES.txt?rev=770478&r1=770477&r2=770478&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/CHANGES.txt (original)
+++ hadoop/pig/branches/multiquery/CHANGES.txt Thu Apr 30 22:44:03 2009
@@ -28,6 +28,14 @@
 
 PIG-700: To automate the pig patch test process (gkesavan via sms)
 
+PIG-712: Added utility functions to create schemas for tuples and bags (zjffdu
+via gates).
+
+PIG-775: PORelationToExprProject should create a NonSpillableDataBag to create
+empty bags (pradeepkth)
+
+PIG-743: To implement clover (gkesavan)
+
 BUG FIXES
 
 PIG-733: Order by sampling dumps entire sample to hdfs which causes dfs

Modified: hadoop/pig/branches/multiquery/build.xml
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/build.xml?rev=770478&r1=770477&r2=770478&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/build.xml (original)
+++ hadoop/pig/branches/multiquery/build.xml Thu Apr 30 22:44:03 2009
@@ -108,6 +108,18 @@
     <property name="make.cmd" value="make"/>
     <property name="test_patch_sh" value="${test.src.dir}/bin/test-patch.sh"/>
 
+    <property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
+    <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>
+    <property name="clover.jar" location="${clover.home}/lib/clover.jar"/>
+    <available property="clover.present" file="${clover.jar}" />
+    <!-- check if clover reports should be generated -->
+    <condition property="clover.enabled">
+       <and>
+               <isset property="run.clover"/>
+               <isset property="clover.present"/>
+        </and>
+    </condition>
+
     <!-- ====================================================== -->
     <!-- Stuff needed by all targets                            -->
     <!-- ====================================================== -->
@@ -396,6 +408,7 @@
                 <pathelement location="${output.jarfile}" />
                 <pathelement location="${test.build.classes}" />
                 <pathelement location="${junit.hadoop.conf}" />
+               <pathelement path="${clover.jar}"/>
                 <path refid="classpath"/>
             </classpath>
             <formatter type="${test.junit.output.format}" />
@@ -638,4 +651,45 @@
                <arg value="${ant.project.name}"/>
        </exec>
      </target>
+
+     <target name="clover" depends="clover.setup, clover.info" 
description="Instrument the Unit tests using Clover.  
+               To use, specify -Dclover.home=&lt;base of clover 
installation&gt; -Drun.clover=true on the command line."/>
+
+     <target name="clover.setup" if="clover.enabled">
+       <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
+        <mkdir dir="${clover.db.dir}"/>
+        <clover-setup initString="${clover.db.dir}/pig_coverage.db">
+               <fileset dir="src" includes="**/*.java"/>
+        </clover-setup>
+     </target>
+
+     <target name="clover.info" unless="clover.present">
+               <echo>
+               Clover not found. Code coverage reports disabled.
+               </echo>
+     </target>
+
+     <target name="clover.check">
+       <fail unless="clover.present">
+               ##################################################################
+               Clover not found.
+               Please specify -Dclover.home=&lt;base of clover installation&gt;
+               on the command line.
+               ##################################################################
+        </fail>
+     </target>
+
+     <target name="generate-clover-reports" depends="clover.check, clover">
+       <mkdir dir="${clover.report.dir}"/>
+       <clover-report>
+               <current outfile="${clover.report.dir}" title="${final.name}">
+                       <format type="html"/>
+               </current>
+       </clover-report>
+        <clover-report>
+               <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
+                       <format type="xml"/>
+               </current>
+        </clover-report>
+     </target>
 </project>
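
A possible invocation of the new Clover targets, offered as a sketch only:
the /opt/clover path is a placeholder, and "test" is assumed to be the
existing junit target, which now picks up clover.jar on its classpath.

    ant -Drun.clover=true -Dclover.home=/opt/clover clover test
    ant -Drun.clover=true -Dclover.home=/opt/clover generate-clover-reports

Reports are then written under ${build.dir}/test/clover/reports in HTML
and XML form.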

Modified: hadoop/pig/branches/multiquery/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/PORelationToExprProject.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/PORelationToExprProject.java?rev=770478&r1=770477&r2=770478&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/PORelationToExprProject.java (original)
+++ hadoop/pig/branches/multiquery/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/PORelationToExprProject.java Thu Apr 30 22:44:03 2009
@@ -25,6 +25,7 @@
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhyPlanVisitor;
 import org.apache.pig.data.DataBag;
 import org.apache.pig.data.DataType;
+import org.apache.pig.data.NonSpillableDataBag;
 import org.apache.pig.impl.plan.NodeIdGenerator;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.VisitorException;
@@ -109,7 +110,7 @@
                 // we received an EOP from the predecessor
                 // since the successor in the pipeline is
                 // expecting a bag, send an empty bag
-                input.result = bagFactory.newDefaultBag();
+                input.result = new NonSpillableDataBag();
                 input.returnStatus = POStatus.STATUS_OK;
                 // we should send EOP the next time we are called
                 // if the foreach in which this operator is present
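
A minimal sketch of the distinction the new line relies on, assuming the
Pig jar is on the classpath (the class name EmptyBagSketch is made up). A
default bag from BagFactory is registered with the memory manager for
possible spilling, while a NonSpillableDataBag is not, which makes it the
cheaper choice for a result bag that is known to stay empty:

    import org.apache.pig.data.BagFactory;
    import org.apache.pig.data.DataBag;
    import org.apache.pig.data.NonSpillableDataBag;

    public class EmptyBagSketch {
        public static void main(String[] args) {
            // Factory bag: tracked for spill management.
            DataBag spillable = BagFactory.getInstance().newDefaultBag();
            // Plain in-memory bag: no spill bookkeeping at all.
            DataBag empty = new NonSpillableDataBag();
            System.out.println(spillable.size() + " " + empty.size()); // 0 0
        }
    }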

Modified: hadoop/pig/branches/multiquery/src/org/apache/pig/data/DefaultAbstractBag.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/src/org/apache/pig/data/DefaultAbstractBag.java?rev=770478&r1=770477&r2=770478&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/src/org/apache/pig/data/DefaultAbstractBag.java (original)
+++ hadoop/pig/branches/multiquery/src/org/apache/pig/data/DefaultAbstractBag.java Thu Apr 30 22:44:03 2009
@@ -188,7 +188,7 @@
                 while (i.hasNext()) thisClone.add(i.next());
             }
             if (other instanceof SortedDataBag ||
-                    this instanceof DistinctDataBag) {
+                    other instanceof DistinctDataBag) {
                 otherClone = bOther;
             } else {
                 otherClone = new SortedDataBag(null);
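
A minimal sketch of the symptom behind the one-word fix above, assuming
the Pig jar is on the classpath (the class name BagCompareSketch is made
up). With the old check on "this", a DistinctDataBag compared against an
unsorted bag holding the same tuples could compare unequal, because the
unsorted side was never cloned into sorted order:

    import org.apache.pig.data.BagFactory;
    import org.apache.pig.data.DataBag;
    import org.apache.pig.data.Tuple;
    import org.apache.pig.data.TupleFactory;

    public class BagCompareSketch {
        public static void main(String[] args) throws Exception {
            TupleFactory tf = TupleFactory.getInstance();
            DataBag distinct = BagFactory.getInstance().newDistinctBag();
            DataBag plain = BagFactory.getInstance().newDefaultBag();
            // Insert in an order that differs from the distinct bag's
            // sorted iteration order.
            for (String s : new String[] { "b", "a" }) {
                Tuple t = tf.newTuple(1);
                t.set(0, s);
                distinct.add(t);
                plain.add(t);
            }
            // The old check used the unsorted bag directly, so this could
            // print false; with the fix it is cloned into a SortedDataBag
            // first, and equal contents compare equal.
            System.out.println(distinct.equals(plain));
        }
    }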

Modified: hadoop/pig/branches/multiquery/src/org/apache/pig/data/NonSpillableDataBag.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/src/org/apache/pig/data/NonSpillableDataBag.java?rev=770478&r1=770477&r2=770478&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/src/org/apache/pig/data/NonSpillableDataBag.java (original)
+++ hadoop/pig/branches/multiquery/src/org/apache/pig/data/NonSpillableDataBag.java Thu Apr 30 22:44:03 2009
@@ -203,8 +203,26 @@
                 else return -1;
             }
 
-            Iterator<Tuple> thisIt = this.iterator();
-            Iterator<Tuple> otherIt = bOther.iterator();
+            // Ugh, this is bogus.  But I have to know if two bags have the
+            // same tuples, regardless of order.  Hopefully most of the
+            // time the size check above will prevent this.
+            // If either bag isn't already sorted, create a sorted bag out
+            // of it so I can guarantee order.
+            DataBag thisClone;
+            DataBag otherClone;
+            thisClone = new SortedDataBag(null);
+            Iterator<Tuple> i = iterator();
+            while (i.hasNext()) thisClone.add(i.next());
+            if (other instanceof SortedDataBag ||
+                    other instanceof DistinctDataBag) {
+                otherClone = bOther;
+            } else {
+                otherClone = new SortedDataBag(null);
+                i = bOther.iterator();
+                while (i.hasNext()) otherClone.add(i.next());
+            }
+            Iterator<Tuple> thisIt = thisClone.iterator();
+            Iterator<Tuple> otherIt = otherClone.iterator();
             while (thisIt.hasNext() && otherIt.hasNext()) {
                 Tuple thisT = thisIt.next();
                 Tuple otherT = otherIt.next();

Added: hadoop/pig/branches/multiquery/src/org/apache/pig/impl/logicalLayer/schema/SchemaUtil.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/src/org/apache/pig/impl/logicalLayer/schema/SchemaUtil.java?rev=770478&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/src/org/apache/pig/impl/logicalLayer/schema/SchemaUtil.java (added)
+++ hadoop/pig/branches/multiquery/src/org/apache/pig/impl/logicalLayer/schema/SchemaUtil.java Thu Apr 30 22:44:03 2009
@@ -0,0 +1,261 @@
+package org.apache.pig.impl.logicalLayer.schema;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.pig.data.DataType;
+import org.apache.pig.impl.logicalLayer.FrontendException;
+
+/**
+ * 
+ * A utility class that simplifies schema creation, especially for bag and
+ * tuple schemas. Currently it only supports simple schema creation; nested
+ * tuples and bags are not supported.
+ * 
+ */
+
+public class SchemaUtil {
+
+    private static Set<Byte> SUPPORTED_TYPE_SET;
+
+    static {
+        SUPPORTED_TYPE_SET = new HashSet<Byte>();
+
+        SUPPORTED_TYPE_SET.add(DataType.INTEGER);
+        SUPPORTED_TYPE_SET.add(DataType.LONG);
+        SUPPORTED_TYPE_SET.add(DataType.CHARARRAY);
+        SUPPORTED_TYPE_SET.add(DataType.BOOLEAN);
+        SUPPORTED_TYPE_SET.add(DataType.BYTE);
+        SUPPORTED_TYPE_SET.add(DataType.BYTEARRAY);
+        SUPPORTED_TYPE_SET.add(DataType.DOUBLE);
+        SUPPORTED_TYPE_SET.add(DataType.FLOAT);
+        SUPPORTED_TYPE_SET.add(DataType.MAP);
+    }
+
+    /**
+     * Create a new tuple schema from the tuple name and two lists: names of
+     * fields and types of fields.
+     * 
+     * @param tupleName
+     * @param fieldNames
+     * @param dataTypes
+     * @return tuple schema
+     * @throws FrontendException
+     */
+    public static Schema newTupleSchema(String tupleName,
+            List<String> fieldNames, List<Byte> dataTypes)
+            throws FrontendException {
+        checkParameters(fieldNames, dataTypes);
+
+        List<Schema.FieldSchema> tokenSchemas = new ArrayList<Schema.FieldSchema>();
+        for (int i = 0; i < fieldNames.size(); ++i) {
+            String name = fieldNames.get(i);
+            Byte type = dataTypes.get(i);
+            tokenSchemas.add(new Schema.FieldSchema(name, type));
+        }
+
+        Schema tupleSchema = new Schema(tokenSchemas);
+        Schema.FieldSchema tupleField = new Schema.FieldSchema(tupleName,
+                tupleSchema);
+
+        return new Schema(tupleField);
+    }
+
+    /**
+     * Create a new tuple schema from the tuple name and two arrays: names
+     * of fields and types of fields.
+     * 
+     * @param tupleName
+     * @param fieldNames
+     * @param dataTypes
+     * @return tuple schema
+     * @throws FrontendException
+     */
+    public static Schema newTupleSchema(String tupleName, String[] fieldNames,
+            Byte[] dataTypes) throws FrontendException {
+        return newTupleSchema(tupleName, Arrays.asList(fieldNames), Arrays
+                .asList(dataTypes));
+    }
+
+    /**
+     * Create a new tuple schema from two lists: names of fields and types
+     * of fields. The default tuple name is t.
+     * 
+     * @param fieldNames
+     * @param dataTypes
+     * @return tuple schema
+     * @throws FrontendException
+     */
+    public static Schema newTupleSchema(List<String> fieldNames,
+            List<Byte> dataTypes) throws FrontendException {
+        return newTupleSchema("t", fieldNames, dataTypes);
+    }
+
+    /**
+     * Create a new tuple schema from one list: the types of fields. The
+     * default field names are f0,f1,f2..., and the tuple name is t.
+     * 
+     * @param dataTypes
+     * @return tuple schema
+     * @throws FrontendException
+     */
+    public static Schema newTupleSchema(List<Byte> dataTypes)
+            throws FrontendException {
+        List<String> names = newNames(dataTypes.size());
+        return newTupleSchema("t", names, dataTypes);
+    }
+
+    /**
+     * Create a new tuple schema from two arrays: names of fields and types
+     * of fields. The default tuple name is t.
+     * 
+     * @param names
+     * @param dataTypes
+     * @return tuple schema
+     * @throws FrontendException
+     */
+    public static Schema newTupleSchema(String[] names, Byte[] dataTypes)
+            throws FrontendException {
+        return newTupleSchema("t", Arrays.asList(names), Arrays
+                .asList(dataTypes));
+    }
+
+    /**
+     * Create a new tuple schema from one array: the types of fields. The
+     * default field names are f0,f1,f2..., and the tuple name is t.
+     * 
+     * @param dataTypes
+     * @return tuple schema
+     * @throws FrontendException
+     */
+    public static Schema newTupleSchema(Byte[] dataTypes)
+            throws FrontendException {
+        return newTupleSchema(Arrays.asList(dataTypes));
+    }
+
+    private static List<String> newNames(int size) {
+        List<String> names = new ArrayList<String>();
+        for (int i = 0; i < size; ++i) {
+            names.add("f" + i);
+        }
+        return names;
+    }
+
+    /**
+     * Create a bag schema from the bag name, tuple name, and two lists:
+     * names of fields and types of fields.
+     * 
+     * @param bagName
+     * @param tupleName
+     * @param fieldNames
+     * @param dataTypes
+     * @return bag schema
+     * @throws FrontendException
+     */
+    public static Schema newBagSchema(String bagName, String tupleName,
+            List<String> fieldNames, List<Byte> dataTypes)
+            throws FrontendException {
+        checkParameters(fieldNames, dataTypes);
+
+        Schema tupleSchema = newTupleSchema(tupleName, fieldNames, dataTypes);
+        Schema.FieldSchema bagField = new Schema.FieldSchema(bagName,
+                tupleSchema, DataType.BAG);
+
+        return new Schema(bagField);
+    }
+
+    public static Schema newBagSchema(String bagName, String tupleName,
+            String[] fieldNames, Byte[] dataTypes) throws FrontendException {
+        return newBagSchema(bagName, tupleName, Arrays.asList(fieldNames),
+                Arrays.asList(dataTypes));
+    }
+
+    /**
+     * Create a bag schema from two lists: names of fields and types of
+     * fields. The default bag name is b and the default tuple name is t.
+     * 
+     * @param names
+     * @param dataTypes
+     * @return bag schema
+     * @throws FrontendException
+     */
+    public static Schema newBagSchema(List<String> names, List<Byte> dataTypes)
+            throws FrontendException {
+        checkParameters(names, dataTypes);
+
+        Schema tupleSchema = newTupleSchema(names, dataTypes);
+        Schema.FieldSchema bagField = new Schema.FieldSchema("b", tupleSchema,
+                DataType.BAG);
+
+        return new Schema(bagField);
+    }
+
+    /**
+     * Create a new bag schema from one list: the types of fields. The
+     * default field names are f0,f1,f2..., the tuple name is t, and the
+     * bag name is b.
+     * 
+     * @param dataTypes
+     * @return bag schema
+     * @throws FrontendException
+     */
+    public static Schema newBagSchema(List<Byte> dataTypes)
+            throws FrontendException {
+        List<String> names = newNames(dataTypes.size());
+        return newBagSchema(names, dataTypes);
+    }
+
+    /**
+     * Create a new bag schema from two arrays: names of fields and types of
+     * fields. The default tuple name is t and the bag name is b.
+     * 
+     * @param names
+     * @param dataTypes
+     * @return bag schema
+     * @throws FrontendException
+     */
+    public static Schema newBagSchema(String[] names, Byte[] dataTypes)
+            throws FrontendException {
+        return newBagSchema(Arrays.asList(names), Arrays.asList(dataTypes));
+    }
+
+    /**
+     * Create a new bag schema from one array: the types of fields. The
+     * tuple name is t and the bag name is b.
+     * 
+     * @param dataTypes
+     * @return bag schema
+     * @throws FrontendException
+     */
+    public static Schema newBagSchema(Byte[] dataTypes)
+            throws FrontendException {
+        return newBagSchema(Arrays.asList(dataTypes));
+    }
+
+    private static void checkDataTypes(List<Byte> dataTypes)
+            throws FrontendException {
+        for (Byte type : dataTypes) {
+            if (!SUPPORTED_TYPE_SET.contains(type)) {
+                throw new FrontendException(
+                        "Currently Pig does not support this type in a schema: "
+                                + DataType.findTypeName(type)
+                                + ". You can write the schema yourself.");
+            }
+        }
+
+    }
+
+    private static void checkParameters(List<String> names, List<Byte> dataTypes)
+            throws FrontendException {
+        checkDataTypes(dataTypes);
+        if (names.size() != dataTypes.size()) {
+            throw new FrontendException(
+                    "The number of names is not equal to the number of dataTypes");
+        }
+    }
+
+}

Modified: hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestDataBag.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestDataBag.java?rev=770478&r1=770477&r2=770478&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestDataBag.java (original)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestDataBag.java Thu Apr 30 22:44:03 2009
@@ -729,6 +729,68 @@
 
         BagFactory.resetSelf();
     }
+    
+    @Test
+    public void testNonSpillableDataBagEquals1() throws Exception {
+        String[][] tupleContents = new String[][] {{"a", "b"},{"c", "d" }, { "e", "f"} };
+        NonSpillableDataBag bg1 = new NonSpillableDataBag();
+        for (int i = 0; i < tupleContents.length; i++) {
+            bg1.add(Util.createTuple(tupleContents[i]));
+        }
+        NonSpillableDataBag bg2 = new NonSpillableDataBag();
+        for (int i = 0; i < tupleContents.length; i++) {
+            bg2.add(Util.createTuple(tupleContents[i]));
+        }
+        assertEquals(bg1, bg2);
+    }
+    
+    @Test
+    public void testNonSpillableDataBagEquals2() throws Exception {
+        String[][] tupleContents = new String[][] {{"a", "b"},{"c", "d" }, { "e", "f"} };
+        NonSpillableDataBag bg1 = new NonSpillableDataBag();
+        for (int i = 0; i < tupleContents.length; i++) {
+            bg1.add(Util.createTuple(tupleContents[i]));
+        }
+        tupleContents = new String[][] {{"c", "d" }, {"a", "b"},{ "e", "f"} };
+        NonSpillableDataBag bg2 = new NonSpillableDataBag();
+        for (int i = 0; i < tupleContents.length; i++) {
+            bg2.add(Util.createTuple(tupleContents[i]));
+        }
+        assertEquals(bg1, bg2);
+    }
+    
+    @Test
+    public void testDefaultDataBagEquals1() throws Exception {
+        String[][] tupleContents = new String[][] {{"a", "b"},{"c", "d" }, { "e", "f"} };
+        TestMemoryManager mgr = new TestMemoryManager();
+        LocalBagFactory factory = new LocalBagFactory(mgr);
+        DataBag bg1 = factory.newDefaultBag();
+        for (int i = 0; i < tupleContents.length; i++) {
+            bg1.add(Util.createTuple(tupleContents[i]));
+        }
+        DataBag bg2 = factory.newDefaultBag();
+        for (int i = 0; i < tupleContents.length; i++) {
+            bg2.add(Util.createTuple(tupleContents[i]));
+        }
+        assertEquals(bg1, bg2);
+    }
+    
+    @Test
+    public void testDefaultDataBagEquals2() throws Exception {
+        String[][] tupleContents = new String[][] {{"a", "b"},{"c", "d" }, { "e", "f"} };
+        TestMemoryManager mgr = new TestMemoryManager();
+        LocalBagFactory factory = new LocalBagFactory(mgr);
+        DataBag bg1 = factory.newDefaultBag();
+        for (int i = 0; i < tupleContents.length; i++) {
+            bg1.add(Util.createTuple(tupleContents[i]));
+        }
+        tupleContents = new String[][] {{"c", "d" }, {"a", "b"},{ "e", "f"} };
+        DataBag bg2 = factory.newDefaultBag();
+        for (int i = 0; i < tupleContents.length; i++) {
+            bg2.add(Util.createTuple(tupleContents[i]));
+        }
+        assertEquals(bg1, bg2);
+    }
 }
 
 

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestSchemaUtil.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestSchemaUtil.java?rev=770478&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestSchemaUtil.java (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestSchemaUtil.java Thu Apr 30 22:44:03 2009
@@ -0,0 +1,82 @@
+package org.apache.pig.test;
+
+import java.util.Arrays;
+
+import junit.framework.TestCase;
+
+import org.apache.pig.data.DataType;
+import org.apache.pig.impl.logicalLayer.FrontendException;
+import org.apache.pig.impl.logicalLayer.schema.Schema;
+import org.apache.pig.impl.logicalLayer.schema.SchemaUtil;
+
+public class TestSchemaUtil extends TestCase {
+
+    public void testTupleSchema() {
+        try {
+            String tupleName = "mytuple";
+            String[] fieldNames = new String[] { "field_0", "field_1" };
+            Byte[] dataTypes = new Byte[] { DataType.LONG, DataType.CHARARRAY };
+
+            String expected = "{mytuple: (field_0: long,field_1: chararray)}";
+            Schema tupleSchema = SchemaUtil.newTupleSchema(tupleName,
+                    fieldNames, dataTypes);
+            assertEquals(expected, tupleSchema.toString());
+
+            tupleSchema = SchemaUtil.newTupleSchema(tupleName, Arrays
+                    .asList(fieldNames), Arrays.asList(dataTypes));
+            assertEquals(expected, tupleSchema.toString());
+
+            expected = "{t: (field_0: long,field_1: chararray)}";
+            tupleSchema = SchemaUtil.newTupleSchema(fieldNames, dataTypes);
+            assertEquals(expected, tupleSchema.toString());
+
+            tupleSchema = SchemaUtil.newTupleSchema(Arrays.asList(fieldNames),
+                    Arrays.asList(dataTypes));
+            assertEquals(expected, tupleSchema.toString());
+
+            expected = "{t: (f0: long,f1: chararray)}";
+            tupleSchema = SchemaUtil.newTupleSchema(dataTypes);
+            assertEquals(expected, tupleSchema.toString());
+
+            tupleSchema = SchemaUtil.newTupleSchema(Arrays.asList(dataTypes));
+            assertEquals(expected, tupleSchema.toString());
+        } catch (FrontendException e) {
+            fail();
+        }
+    }
+
+    public void testBagSchema() {
+        try {
+            String bagName="mybag";
+            String tupleName = "mytuple";
+            String[] fieldNames = new String[] { "field_0", "field_1" };
+            Byte[] dataTypes = new Byte[] { DataType.LONG, DataType.CHARARRAY };
+
+            String expected = "{mybag: {mytuple: (field_0: long,field_1: chararray)}}";
+            Schema bagSchema = SchemaUtil.newBagSchema(bagName,tupleName,
+                    fieldNames, dataTypes);
+            assertEquals(expected, bagSchema.toString());
+
+            bagSchema = SchemaUtil.newBagSchema(bagName,tupleName, Arrays
+                    .asList(fieldNames), Arrays.asList(dataTypes));
+            assertEquals(expected, bagSchema.toString());
+
+            expected = "{b: {t: (field_0: long,field_1: chararray)}}";
+            bagSchema = SchemaUtil.newBagSchema(fieldNames, dataTypes);
+            assertEquals(expected, bagSchema.toString());
+
+            bagSchema = SchemaUtil.newBagSchema(Arrays.asList(fieldNames),
+                    Arrays.asList(dataTypes));
+            assertEquals(expected, bagSchema.toString());
+
+            expected = "{b: {t: (f0: long,f1: chararray)}}";
+            bagSchema = SchemaUtil.newBagSchema(dataTypes);
+            assertEquals(expected, bagSchema.toString());
+
+            bagSchema = SchemaUtil.newBagSchema(Arrays.asList(dataTypes));
+            assertEquals(expected, bagSchema.toString());
+        } catch (FrontendException e) {
+            fail();
+        }
+    }
+}

Propchange: hadoop/pig/branches/multiquery/test/org/apache/pig/test/utils/dotGraph/DOTParser.jjt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Apr 30 22:44:03 2009
@@ -1 +1 @@
-/hadoop/pig/trunk/test/org/apache/pig/test/utils/dotGraph/DOTParser.jjt:758070-767341
+/hadoop/pig/trunk/test/org/apache/pig/test/utils/dotGraph/DOTParser.jjt:758070-767341,767975-770110

