Modified: pig/branches/spark/test/org/apache/pig/test/TestIn.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestIn.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestIn.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestIn.java Tue Jan 27 02:27:45 2015
@@ -25,7 +25,6 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.mock.Storage.Data;
 import org.apache.pig.data.Tuple;
@@ -40,7 +39,7 @@ public class TestIn {
      */
     @Test
     public void testWithFilter() throws Exception {
-        PigServer pigServer = new PigServer(ExecType.LOCAL);
+        PigServer pigServer = new PigServer(Util.getLocalTestMode());
         Data data = resetData(pigServer);
 
         data.set("foo",
@@ -68,7 +67,7 @@ public class TestIn {
      */
     @Test
     public void testWithBincond() throws Exception {
-        PigServer pigServer = new PigServer(ExecType.LOCAL);
+        PigServer pigServer = new PigServer(Util.getLocalTestMode());
         Data data = resetData(pigServer);
 
         data.set("foo",
@@ -98,7 +97,7 @@ public class TestIn {
      */
     @Test
     public void testWithSplit() throws Exception {
-        PigServer pigServer = new PigServer(ExecType.LOCAL);
+        PigServer pigServer = new PigServer(Util.getLocalTestMode());
         Data data = resetData(pigServer);
 
         data.set("foo",
@@ -132,7 +131,7 @@ public class TestIn {
      */
     @Test
     public void testWithDereferenceOperator() throws Exception {
-        PigServer pigServer = new PigServer(ExecType.LOCAL);
+        PigServer pigServer = new PigServer(Util.getLocalTestMode());
         Data data = resetData(pigServer);
 
         data.set("foo",
@@ -161,7 +160,7 @@ public class TestIn {
      */
     @Test(expected = FrontendException.class)
     public void testMissingRhsOperand() throws Exception {
-        PigServer pigServer = new PigServer(ExecType.LOCAL);
+        PigServer pigServer = new PigServer(Util.getLocalTestMode());
         Data data = resetData(pigServer);
 
         data.set("foo",

Modified: pig/branches/spark/test/org/apache/pig/test/TestInfixArithmetic.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestInfixArithmetic.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestInfixArithmetic.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestInfixArithmetic.java Tue Jan 27 02:27:45 2015
@@ -28,7 +28,6 @@ import java.util.Random;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.PigStorage;
 import org.apache.pig.data.DataType;
@@ -46,7 +45,7 @@ public class TestInfixArithmetic {
 
     @Before
     public void setUp() throws Exception {
-        pig = new PigServer(ExecType.LOCAL);
+        pig = new PigServer(Util.getLocalTestMode());
     }
 
     Boolean[] nullFlags = new Boolean[] { false, true };

Modified: pig/branches/spark/test/org/apache/pig/test/TestInputOutputFileValidator.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestInputOutputFileValidator.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestInputOutputFileValidator.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestInputOutputFileValidator.java Tue Jan 27 02:27:45 2015
@@ -28,7 +28,6 @@ import java.io.IOException;
 import java.util.Iterator;
 import java.util.Properties;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.FuncSpec;
 import org.apache.pig.PigException;
 import org.apache.pig.PigServer;
@@ -57,9 +56,9 @@ public class TestInputOutputFileValidato
 
     @Before
     public void setUp() throws Exception {
-        ctx = new PigContext(ExecType.LOCAL, new Properties());
+        ctx = new PigContext(Util.getLocalTestMode(), new Properties());
         ctx.connect();
-        pig = new PigServer(ExecType.LOCAL);
+        pig = new PigServer(Util.getLocalTestMode());
     }
 
     @Test

Modified: pig/branches/spark/test/org/apache/pig/test/TestJoin.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestJoin.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestJoin.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestJoin.java Tue Jan 27 02:27:45 2015
@@ -15,95 +15,49 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.pig.test;
 
-import static org.apache.pig.builtin.mock.Storage.resetData;
-import static org.apache.pig.builtin.mock.Storage.tuple;
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
 
-import java.io.File;
 import java.io.IOException;
 import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
 
-import org.apache.pig.ExecType;
-import org.apache.pig.PigException;
 import org.apache.pig.PigServer;
-import org.apache.pig.backend.executionengine.ExecException;
-import org.apache.pig.builtin.mock.Storage.Data;
-import org.apache.pig.data.BagFactory;
-import org.apache.pig.data.DataByteArray;
 import org.apache.pig.data.Tuple;
-import org.apache.pig.data.TupleFactory;
-import org.apache.pig.impl.logicalLayer.schema.Schema;
-import org.apache.pig.impl.util.LogUtils;
-import org.apache.pig.impl.util.Utils;
-import org.apache.pig.newplan.Operator;
-import org.apache.pig.newplan.logical.relational.LOJoin;
-import org.apache.pig.newplan.logical.relational.LOJoin.JOINTYPE;
-import org.apache.pig.newplan.logical.relational.LogicalPlan;
-import org.apache.pig.parser.ParserException;
 import org.junit.AfterClass;
+import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.collect.Sets;
-
 /**
  * Test cases to test join statement
  */
 
-public class TestJoin {
-
+public class TestJoin extends TestJoinBase {
    private static MiniGenericCluster cluster = MiniGenericCluster.buildCluster();
-    private PigServer pigServer;
-
-    TupleFactory mTf = TupleFactory.getInstance();
-    BagFactory mBf = BagFactory.getInstance();
-    private static ExecType[] execTypes = new ExecType[] {ExecType.LOCAL, cluster.getExecType()};
 
     @AfterClass
     public static void oneTimeTearDown() throws Exception {
         if (cluster != null) cluster.shutDown();
     }
 
-    private void setUp(ExecType execType) throws ExecException {
-        Util.resetStateForExecModeSwitch();
-        if(execType == cluster.getExecType()) {
-            pigServer = new PigServer(cluster.getExecType(), cluster.getProperties());
-        } else if(execType == ExecType.LOCAL) {
-            pigServer = new PigServer(ExecType.LOCAL);
-        }
+    @Before
+    public void setUp() throws Exception {
+        pigServer = new PigServer(cluster.getExecType(), cluster.getProperties());
     }
 
-    private String createInputFile(ExecType execType, String fileNameHint, String[] data) throws IOException {
+    protected String createInputFile(String fileNameHint, String[] data) throws IOException {
         String fileName = "";
-        if(execType == cluster.getExecType()) {
-            Util.createInputFile(cluster, fileNameHint, data);
-            fileName = fileNameHint;
-        } else if (execType == ExecType.LOCAL) {
-            File f = Util.createInputFile("test", fileNameHint, data);
-            fileName = Util.generateURI(f.getAbsolutePath(), pigServer.getPigContext());
-        }
+        Util.createInputFile(cluster, fileNameHint, data);
+        fileName = fileNameHint;
         return fileName;
     }
 
-    private void deleteInputFile(ExecType execType, String fileName) throws IOException {
-        if(execType == cluster.getExecType()) {
-            Util.deleteFile(cluster, fileName);
-        } else if(execType == ExecType.LOCAL){
-            fileName = fileName.replace("file://", "");
-            new File(fileName).delete();
-        }
+    protected void deleteInputFile(String fileName) throws Exception {
+        Util.deleteFile(cluster, fileName);
     }
 
     @Test
-    public void testJoinWithMissingFieldsInTuples() throws IOException{
-        setUp(cluster.getExecType());
+    public void testJoinWithMissingFieldsInTuples() throws Exception{
         String[] input1 = {
                 "ff ff ff",
                 "",
@@ -122,615 +76,15 @@ public class TestJoin {
                 ""
                 };
 
-        String firstInput = createInputFile(cluster.getExecType(), "a.txt", input1);
-        String secondInput = createInputFile(cluster.getExecType(), "b.txt", input2);
+        String firstInput = createInputFile("a.txt", input1);
+        String secondInput = createInputFile("b.txt", input2);
         String script = "a = load 'a.txt'  using PigStorage(' ');" +
         "b = load 'b.txt'  using PigStorage('\u0001');" +
         "c = join a by $0, b by $0;";
         Util.registerMultiLineQuery(pigServer, script);
         Iterator<Tuple> it = pigServer.openIterator("c");
         assertFalse(it.hasNext());
-        deleteInputFile(cluster.getExecType(), firstInput);
-        deleteInputFile(cluster.getExecType(), secondInput);
-    }
-
-    @Test
-    public void testJoinUnkownSchema() throws Exception {
-        // If any of the input schema is unknown, the resulting schema should 
be unknown as well
-        for (ExecType execType : execTypes) {
-            setUp(execType);
-            String script = "a = load 'a.txt';" +
-                    "b = load 'b.txt'; " +
-                    "c = join a by $0, b by $0;";
-            Util.registerMultiLineQuery(pigServer, script);
-            Schema schema = pigServer.dumpSchema("c");
-            assertNull(schema);
-        }
-    }
-
-    @Test
-    public void testDefaultJoin() throws IOException, ParserException {
-        for (ExecType execType : execTypes) {
-            setUp(execType);
-            String[] input1 = {
-                    "hello\t1",
-                    "bye\t2",
-                    "\t3"
-            };
-            String[] input2 = {
-                    "hello\tworld",
-                    "good\tmorning",
-                    "\tevening"
-            };
-
-            String firstInput = createInputFile(execType, "a.txt", input1);
-            String secondInput = createInputFile(execType, "b.txt", input2);
-            Tuple expectedResult = 
(Tuple)Util.getPigConstant("('hello',1,'hello','world')");
-
-            // with schema
-            String script = "a = load '"+ Util.encodeEscape(firstInput) +"' as 
(n:chararray, a:int); " +
-                    "b = load '"+ Util.encodeEscape(secondInput) +"' as 
(n:chararray, m:chararray); " +
-                    "c = join a by $0, b by $0;";
-            Util.registerMultiLineQuery(pigServer, script);
-            Iterator<Tuple> it = pigServer.openIterator("c");
-            assertTrue(it.hasNext());
-            assertEquals(expectedResult, it.next());
-            assertFalse(it.hasNext());
-
-            // without schema
-            script = "a = load '"+ Util.encodeEscape(firstInput) + "'; " +
-            "b = load '" + Util.encodeEscape(secondInput) + "'; " +
-            "c = join a by $0, b by $0;";
-            Util.registerMultiLineQuery(pigServer, script);
-            it = pigServer.openIterator("c");
-            assertTrue(it.hasNext());
-            assertEquals(expectedResult.toString(), it.next().toString());
-            assertFalse(it.hasNext());
-            deleteInputFile(execType, firstInput);
-            deleteInputFile(execType, secondInput);
-        }
-    }
-
-
-    @Test
-    public void testJoinSchema() throws Exception {
-        for (ExecType execType : execTypes) {
-            setUp(execType);
-            String[] input1 = {
-                    "1\t2",
-                    "2\t3",
-                    "3\t4"
-            };
-            String[] input2 = {
-                    "1\thello",
-                    "4\tbye",
-            };
-
-            String firstInput = createInputFile(execType, "a.txt", input1);
-            String secondInput = createInputFile(execType, "b.txt", input2);
-            Tuple expectedResult = 
(Tuple)Util.getPigConstant("(1,2,1,'hello',1,2,1,'hello')");
-
-            // with schema
-            String script = "a = load '"+ Util.encodeEscape(firstInput) +"' as 
(i:int, j:int); " +
-                    "b = load '"+ Util.encodeEscape(secondInput) +"' as 
(k:int, l:chararray); " +
-                    "c = join a by $0, b by $0;" +
-                    "d = foreach c generate i,j,k,l,a::i as ai,a::j as aj,b::k 
as bk,b::l as bl;";
-            Util.registerMultiLineQuery(pigServer, script);
-            Iterator<Tuple> it = pigServer.openIterator("d");
-            assertTrue(it.hasNext());
-            assertEquals(expectedResult, it.next());
-            assertFalse(it.hasNext());
-
-            // schema with duplicates
-            script = "a = load '"+ Util.encodeEscape(firstInput) +"' as 
(i:int, j:int); " +
-            "b = load '"+ Util.encodeEscape(secondInput) +"' as (i:int, 
l:chararray); " +
-            "c = join a by $0, b by $0;" +
-            "d = foreach c generate i,j,l,a::i as ai,a::j as aj,b::i as 
bi,b::l as bl;";
-            boolean exceptionThrown = false;
-            try{
-                Util.registerMultiLineQuery(pigServer, script);
-                pigServer.openIterator("d");
-            }catch (Exception e) {
-                PigException pe = LogUtils.getPigException(e);
-                assertEquals(1025, pe.getErrorCode());
-                exceptionThrown = true;
-            }
-            assertTrue(exceptionThrown);
-
-            // schema with duplicates with resolution
-            script = "a = load '"+ Util.encodeEscape(firstInput) +"' as 
(i:int, j:int); " +
-            "b = load '"+ Util.encodeEscape(secondInput) +"' as (i:int, 
l:chararray); " +
-            "c = join a by $0, b by $0;" +
-            "d = foreach c generate a::i as ai1,j,b::i as bi1,l,a::i as 
ai2,a::j as aj2,b::i as bi3,b::l as bl3;";
-            Util.registerMultiLineQuery(pigServer, script);
-            it = pigServer.openIterator("d");
-            assertTrue(it.hasNext());
-            assertEquals(expectedResult, it.next());
-            assertFalse(it.hasNext());
-            deleteInputFile(execType, firstInput);
-            deleteInputFile(execType, secondInput);
-        }
-    }
-
-    @Test
-    public void testJoinSchema2() throws Exception {
-        // test join where one load does not have schema
-        ExecType execType = ExecType.LOCAL;
-        setUp(execType );
-        String[] input1 = {
-                "1\t2",
-                "2\t3",
-                "3\t4"
-        };
-        String[] input2 = {
-                "1\thello",
-                "4\tbye",
-        };
-
-        String firstInput = createInputFile(execType, "a.txt", input1);
-        String secondInput = createInputFile(execType, "b.txt", input2);
-        Tuple expectedResultCharArray =
-            
(Tuple)Util.getPigConstant("('1','2','1','hello','1','2','1','hello')");
-
-        Tuple expectedResult = TupleFactory.getInstance().newTuple();
-        for(Object field : expectedResultCharArray.getAll()){
-            expectedResult.append(new DataByteArray(field.toString()));
-        }
-
-        // with schema
-        String script = "a = load '"+ Util.encodeEscape(firstInput) +"' ; " +
-        //re-using alias a for new operator below, doing this intentionally
-        // because such use case has been seen
-        "a = foreach a generate $0 as i, $1 as j ;" +
-        "b = load '"+ Util.encodeEscape(secondInput) +"' as (k, l); " +
-        "c = join a by $0, b by $0;" +
-        "d = foreach c generate i,j,k,l,a::i as ai,a::j as aj,b::k as bk,b::l 
as bl;";
-        Util.registerMultiLineQuery(pigServer, script);
-        Iterator<Tuple> it = pigServer.openIterator("d");
-        assertTrue(it.hasNext());
-        Tuple res = it.next();
-        assertEquals(expectedResult, res);
-        assertFalse(it.hasNext());
-        deleteInputFile(execType, firstInput);
-        deleteInputFile(execType, secondInput);
-
-    }
-
-    @Test
-    public void testLeftOuterJoin() throws Exception {
-        for (ExecType execType : execTypes) {
-            setUp(execType);
-            String[] input1 = {
-                    "hello\t1",
-                    "bye\t2",
-                    "\t3"
-            };
-            String[] input2 = {
-                    "hello\tworld",
-                    "good\tmorning",
-                    "\tevening"
-
-            };
-
-            String firstInput = createInputFile(execType, "a.txt", input1);
-            String secondInput = createInputFile(execType, "b.txt", input2);
-            List<Tuple> expectedResults = 
Util.getTuplesFromConstantTupleStrings(
-                    new String[] {
-                            "('hello',1,'hello','world')",
-                            "('bye',2,null,null)",
-                            "(null,3,null,null)"
-                    });
-
-            // with and without optional outer
-            for(int i = 0; i < 2; i++) {
-                //with schema
-                String script = "a = load '"+ Util.encodeEscape(firstInput) 
+"' as (n:chararray, a:int); " +
-                        "b = load '"+ Util.encodeEscape(secondInput) +"' as 
(n:chararray, m:chararray); ";
-                if(i == 0) {
-                    script +=  "c = join a by $0 left outer, b by $0;" ;
-                } else {
-                    script +=  "c = join a by $0 left, b by $0;" ;
-                }
-                script += "d = order c by $1;";
-                // ensure we parse correctly
-                Util.buildLp(pigServer, script);
-
-                // run query and test results only once
-                if(i == 0) {
-                    Util.registerMultiLineQuery(pigServer, script);
-                    Iterator<Tuple> it = pigServer.openIterator("d");
-                    int counter= 0;
-                    while(it.hasNext()) {
-                        assertEquals(expectedResults.get(counter++), 
it.next());
-                    }
-                    assertEquals(expectedResults.size(), counter);
-
-                    // without schema
-                    script = "a = load '"+ Util.encodeEscape(firstInput) +"'; 
" +
-                    "b = load '"+ Util.encodeEscape(secondInput) +"'; ";
-                    if(i == 0) {
-                        script +=  "c = join a by $0 left outer, b by $0;" ;
-                    } else {
-                        script +=  "c = join a by $0 left, b by $0;" ;
-                    }
-                    try {
-                        Util.registerMultiLineQuery(pigServer, script);
-                    } catch (Exception e) {
-                        PigException pe = LogUtils.getPigException(e);
-                        assertEquals(1105, pe.getErrorCode());
-                    }
-                }
-            }
-            deleteInputFile(execType, firstInput);
-            deleteInputFile(execType, secondInput);
-        }
-    }
-
-    @Test
-    public void testRightOuterJoin() throws Exception {
-        for (ExecType execType : execTypes) {
-            setUp(execType);
-            String[] input1 = {
-                    "hello\t1",
-                    "bye\t2",
-                    "\t3"
-            };
-            String[] input2 = {
-                    "hello\tworld",
-                    "good\tmorning",
-                    "\tevening"
-
-            };
-
-            String firstInput = createInputFile(execType, "a.txt", input1);
-            String secondInput = createInputFile(execType, "b.txt", input2);
-            List<Tuple> expectedResults = 
Util.getTuplesFromConstantTupleStrings(
-                    new String[] {
-                            "(null,null,null,'evening')",
-                            "(null,null,'good','morning')",
-                            "('hello',1,'hello','world')"
-                                           });
-            // with and without optional outer
-            for(int i = 0; i < 2; i++) {
-                // with schema
-                String script = "a = load '"+ Util.encodeEscape(firstInput) 
+"' as (n:chararray, a:int); " +
-                        "b = load '"+ Util.encodeEscape(secondInput) +"' as 
(n:chararray, m:chararray); ";
-                if(i == 0) {
-                    script +=  "c = join a by $0 right outer, b by $0;" ;
-                } else {
-                    script +=  "c = join a by $0 right, b by $0;" ;
-                }
-                script += "d = order c by $3;";
-                // ensure we parse correctly
-                Util.buildLp(pigServer, script);
-
-                // run query and test results only once
-                if(i == 0) {
-                    Util.registerMultiLineQuery(pigServer, script);
-                    Iterator<Tuple> it = pigServer.openIterator("d");
-                    int counter= 0;
-                    while(it.hasNext()) {
-                        assertEquals(expectedResults.get(counter++), 
it.next());
-                    }
-                    assertEquals(expectedResults.size(), counter);
-
-                    // without schema
-                    script = "a = load '"+ Util.encodeEscape(firstInput) +"'; 
" +
-                    "b = load '"+ Util.encodeEscape(secondInput) +"'; " ;
-                    if(i == 0) {
-                        script +=  "c = join a by $0 right outer, b by $0;" ;
-                    } else {
-                        script +=  "c = join a by $0 right, b by $0;" ;
-                    }
-                    try {
-                        Util.registerMultiLineQuery(pigServer, script);
-                    } catch (Exception e) {
-                        PigException pe = LogUtils.getPigException(e);
-                        assertEquals(1105, pe.getErrorCode());
-                    }
-                }
-            }
-            deleteInputFile(execType, firstInput);
-            deleteInputFile(execType, secondInput);
-        }
-    }
-
-    @Test
-    public void testFullOuterJoin() throws Exception {
-        for (ExecType execType : execTypes) {
-            setUp(execType);
-            String[] input1 = {
-                    "hello\t1",
-                    "bye\t2",
-                    "\t3"
-            };
-            String[] input2 = {
-                    "hello\tworld",
-                    "good\tmorning",
-                    "\tevening"
-
-            };
-
-            String firstInput = createInputFile(execType, "a.txt", input1);
-            String secondInput = createInputFile(execType, "b.txt", input2);
-            List<Tuple> expectedResults = 
Util.getTuplesFromConstantTupleStrings(
-                    new String[] {
-                            "(null,null,null,'evening')" ,
-                            "(null,null,'good','morning')" ,
-                            "('hello',1,'hello','world')" ,
-                            "('bye',2,null,null)" ,
-                            "(null,3,null,null)"
-                                           });
-            // with and without optional outer
-            for(int i = 0; i < 2; i++) {
-                // with schema
-                String script = "a = load '"+ Util.encodeEscape(firstInput) 
+"' as (n:chararray, a:int); " +
-                        "b = load '"+ Util.encodeEscape(secondInput) +"' as 
(n:chararray, m:chararray); ";
-                if(i == 0) {
-                    script +=  "c = join a by $0 full outer, b by $0;" ;
-                } else {
-                    script +=  "c = join a by $0 full, b by $0;" ;
-                }
-                script += "d = order c by $1, $3;";
-                // ensure we parse correctly
-                Util.buildLp(pigServer, script);
-
-                // run query and test results only once
-                if(i == 0) {
-                    Util.registerMultiLineQuery(pigServer, script);
-                    Iterator<Tuple> it = pigServer.openIterator("d");
-                    int counter= 0;
-                    while(it.hasNext()) {
-                        assertEquals(expectedResults.get(counter++), 
it.next());
-                    }
-                    assertEquals(expectedResults.size(), counter);
-
-                    // without schema
-                    script = "a = load '"+ Util.encodeEscape(firstInput) +"'; 
" +
-                    "b = load '"+ Util.encodeEscape(secondInput) +"'; " ;
-                    if(i == 0) {
-                        script +=  "c = join a by $0 full outer, b by $0;" ;
-                    } else {
-                        script +=  "c = join a by $0 full, b by $0;" ;
-                    }
-                    try {
-                        Util.registerMultiLineQuery(pigServer, script);
-                    } catch (Exception e) {
-                        PigException pe = LogUtils.getPigException(e);
-                        assertEquals(1105, pe.getErrorCode());
-                    }
-                }
-            }
-            deleteInputFile(execType, firstInput);
-            deleteInputFile(execType, secondInput);
-        }
-    }
-
-    @Test
-    public void testMultiOuterJoinFailure() throws ExecException {
-        setUp(ExecType.LOCAL);
-        String[] types = new String[] { "left", "right", "full" };
-        String query = "a = load 'a.txt' as (n:chararray, a:int);\n" +
-        "b = load 'b.txt' as (n:chararray, m:chararray);\n"+
-        "c = load 'c.txt' as (n:chararray, m:chararray);\n";
-        for (int i = 0; i < types.length; i++) {
-            boolean errCaught = false;
-            try {
-                String q = query +
-                           "d = join a by $0 " + types[i] + " outer, b by $0, 
c by $0;" +
-                           "store d into 'output';";
-                Util.buildLp(pigServer, q);
-            } catch(Exception e) {
-                errCaught = true;
-                assertTrue(e.getMessage().contains("mismatched input ',' 
expecting SEMI_COLON"));
-            }
-            assertTrue(errCaught);
-
-        }
-
-    }
-
-    @Test
-    public void testNonRegularOuterJoinFailure() throws ExecException {
-        setUp(ExecType.LOCAL);
-        String query = "a = load 'a.txt' as (n:chararray, a:int); "+
-        "b = load 'b.txt' as (n:chararray, m:chararray); ";
-        String[] types = new String[] { "left", "right", "full" };
-        String[] joinTypes = new String[] { "replicated", "repl"};
-        for (int i = 0; i < types.length; i++) {
-            for(int j = 0; j < joinTypes.length; j++) {
-                boolean errCaught = false;
-                try {
-                    String q = query + "d = join a by $0 " +
-                    types[i] + " outer, b by $0 using '" + joinTypes[j] +"';" +
-                    "store d into 'output';";
-                    Util.buildLp(pigServer, q);
-
-                } catch(Exception e) {
-                    errCaught = true;
-                     // This after adding support of LeftOuter Join to 
replicated Join
-                        assertTrue(e.getMessage().contains("does not support 
(right|full) outer joins"));
-                }
-                assertEquals( i == 0 ? false : true, errCaught);
-            }
-        }
-    }
-
-    @Test
-    public void testJoinTupleFieldKey() throws Exception{
-        for (ExecType execType : execTypes) {
-            setUp(execType);
-            String[] input1 = {
-                    "(1,a)",
-                    "(2,aa)"
-            };
-            String[] input2 = {
-                    "(1,b)",
-                    "(2,bb)"
-            };
-
-            String firstInput = createInputFile(execType, "a.txt", input1);
-            String secondInput = createInputFile(execType, "b.txt", input2);
-
-            String script = "a = load '"+ Util.encodeEscape(firstInput) +"' as 
(a:tuple(a1:int, a2:chararray));" +
-                    "b = load '"+ Util.encodeEscape(secondInput) +"' as 
(b:tuple(b1:int, b2:chararray));" +
-                    "c = join a by a.a1, b by b.b1;";
-            Util.registerMultiLineQuery(pigServer, script);
-            Iterator<Tuple> it = pigServer.openIterator("c");
-
-            List<Tuple> expectedResults = 
Util.getTuplesFromConstantTupleStrings(
-                    new String[] {
-                            "((1,'a'),(1,'b'))",
-                            "((2,'aa'),(2,'bb'))"
-                            });
-            Util.checkQueryOutputsAfterSort(it, expectedResults);
-
-            deleteInputFile(execType, firstInput);
-            deleteInputFile(execType, secondInput);
-        }
-    }
-
-    @Test
-    public void testJoinNullTupleFieldKey() throws Exception{
-        for (ExecType execType : execTypes) {
-            setUp(execType);
-            String[] input1 = {
-                    "1\t",
-                    "2\taa"
-            };
-            String[] input2 = {
-                    "1\t",
-                    "2\taa"
-            };
-
-            String firstInput = createInputFile(execType, "a.txt", input1);
-            String secondInput = createInputFile(execType, "b.txt", input2);
-
-            String script = "a = load '"+ Util.encodeEscape(firstInput) +"' as 
(a1:int, a2:chararray);" +
-                    "b = load '"+ Util.encodeEscape(secondInput) +"' as 
(b1:int, b2:chararray);" +
-                    "c = join a by (a1, a2), b by (b1, b2);";
-            Util.registerMultiLineQuery(pigServer, script);
-            Iterator<Tuple> it = pigServer.openIterator("c");
-
-            List<Tuple> expectedResults = Util
-                    .getTuplesFromConstantTupleStrings(new String[] { 
"(2,'aa',2,'aa')" });
-            Util.checkQueryOutputs(it, expectedResults);
-
-            deleteInputFile(execType, firstInput);
-            deleteInputFile(execType, secondInput);
-        }
-    }
-
-    @Test
-    public void testLiteralsForJoinAlgoSpecification1() throws Exception {
-        setUp(ExecType.LOCAL);
-        String query = "a = load 'A'; " +
-                       "b = load 'B'; " +
-                       "c = Join a by $0, b by $0 using 'merge';" +
-                       "store c into 'output';";
-        LogicalPlan lp = Util.buildLp(pigServer, query);
-        Operator store = lp.getSinks().get(0);
-        LOJoin join = (LOJoin)lp.getPredecessors( store ).get(0);
-        assertEquals(JOINTYPE.MERGE, join.getJoinType());
-    }
-
-    @Test
-    public void testLiteralsForJoinAlgoSpecification2() throws Exception {
-        setUp(ExecType.LOCAL);
-        String query = "a = load 'A'; " +
-                       "b = load 'B'; " +
-                       "c = Join a by $0, b by $0 using 'hash'; "+
-                       "store c into 'output';";
-        LogicalPlan lp = Util.buildLp(pigServer, query);
-        Operator store = lp.getSinks().get(0);
-        LOJoin join = (LOJoin) lp.getPredecessors( store ).get(0);
-        assertEquals(JOINTYPE.HASH, join.getJoinType());
-    }
-
-    @Test
-    public void testLiteralsForJoinAlgoSpecification5() throws Exception {
-        setUp(ExecType.LOCAL);
-        String query = "a = load 'A'; " +
-                       "b = load 'B'; " +
-                       "c = Join a by $0, b by $0 using 'default'; "+
-                       "store c into 'output';";
-        LogicalPlan lp = Util.buildLp(pigServer, query);
-        Operator store = lp.getSinks().get(0);
-        LOJoin join = (LOJoin) lp.getPredecessors( store ).get(0);
-        assertEquals(JOINTYPE.HASH, join.getJoinType());
-    }
-
-    @Test
-    public void testLiteralsForJoinAlgoSpecification3() throws Exception {
-        setUp(ExecType.LOCAL);
-        String query = "a = load 'A'; " +
-                       "b = load 'B'; " +
-                       "c = Join a by $0, b by $0 using 'repl'; "+
-                       "store c into 'output';";
-        LogicalPlan lp = Util.buildLp(pigServer, query);
-        Operator store = lp.getSinks().get(0);
-        LOJoin join = (LOJoin) lp.getPredecessors( store ).get(0);
-        assertEquals(JOINTYPE.REPLICATED, join.getJoinType());
-    }
-
-    @Test
-    public void testLiteralsForJoinAlgoSpecification4() throws Exception {
-        setUp(ExecType.LOCAL);
-        String query = "a = load 'A'; " +
-                       "b = load 'B'; " +
-                       "c = Join a by $0, b by $0 using 'replicated'; "+
-                       "store c into 'output';";
-        LogicalPlan lp = Util.buildLp(pigServer, query);
-        Operator store = lp.getSinks().get(0);
-        LOJoin join = (LOJoin) lp.getPredecessors( store ).get(0);
-        assertEquals(JOINTYPE.REPLICATED, join.getJoinType());
-    }
-
-    // See: https://issues.apache.org/jira/browse/PIG-3093
-    @Test
-    public void testIndirectSelfJoinRealias() throws Exception {
-        setUp(ExecType.LOCAL);
-        Data data = resetData(pigServer);
-
-        Set<Tuple> tuples = Sets.newHashSet(tuple("a"), tuple("b"), 
tuple("c"));
-        data.set("foo", Utils.getSchemaFromString("field1:chararray"), tuples);
-        pigServer.registerQuery("A = load 'foo' using mock.Storage();");
-        pigServer.registerQuery("B = foreach A generate *;");
-        pigServer.registerQuery("C = join A by field1, B by field1;");
-        assertEquals(Utils.getSchemaFromString("A::field1:chararray, 
B::field1:chararray"), pigServer.dumpSchema("C"));
-        pigServer.registerQuery("D = foreach C generate B::field1, A::field1 
as field2;");
-        assertEquals(Utils.getSchemaFromString("B::field1:chararray, 
field2:chararray"), pigServer.dumpSchema("D"));
-        pigServer.registerQuery("E = foreach D generate field1, field2;");
-        assertEquals(Utils.getSchemaFromString("B::field1:chararray, 
field2:chararray"), pigServer.dumpSchema("E"));
-        pigServer.registerQuery("F = foreach E generate field2;");
-        pigServer.registerQuery("store F into 'foo_out' using 
mock.Storage();");
-        List<Tuple> out = data.get("foo_out");
-        assertEquals("Expected size was "+tuples.size()+" but was 
"+out.size(), tuples.size(), out.size());
-        for (Tuple t : out) {
-            assertTrue("Should have found tuple "+t+" in expected: "+tuples, 
tuples.remove(t));
-        }
-        assertTrue("All expected tuples should have been found, remaining: 
"+tuples, tuples.isEmpty());
-    }
-
-    @Test
-    public void testIndirectSelfJoinData() throws Exception {
-        setUp(ExecType.LOCAL);
-        Data data = resetData(pigServer);
-
-        Set<Tuple> tuples = Sets.newHashSet(tuple("a", 1), tuple("b", 2), 
tuple("c", 3));
-        data.set("foo", 
Utils.getSchemaFromString("field1:chararray,field2:int"), tuples);
-        pigServer.registerQuery("A = load 'foo' using mock.Storage();");
-        pigServer.registerQuery("B = foreach A generate field1, field2*2 as 
field2;");
-        pigServer.registerQuery("C = join A by field1, B by field1;");
-        pigServer.registerQuery("D = foreach C generate A::field1 as field1_a, 
B::field1 as field1_b, A::field2 as field2_a, B::field2 as field2_b;");
-        pigServer.registerQuery("store D into 'foo_out' using 
mock.Storage();");
-
-        Set<Tuple> expected = Sets.newHashSet(tuple("a", "a", 1, 2), 
tuple("b", "b", 2, 4), tuple("c", "c", 3, 6));
-        List<Tuple> out = data.get("foo_out");
-        assertEquals("Expected size was "+expected.size()+" but was 
"+out.size(), expected.size(), out.size());
-        for (Tuple t : out) {
-            assertTrue("Should have found tuple "+t+" in expected: "+expected, 
expected.remove(t));
-        }
-        assertTrue("All expected tuples should have been found, remaining: 
"+expected, expected.isEmpty());
+        deleteInputFile(firstInput);
+        deleteInputFile(secondInput);
     }
 }

Modified: pig/branches/spark/test/org/apache/pig/test/TestJsonLoaderStorage.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestJsonLoaderStorage.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestJsonLoaderStorage.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestJsonLoaderStorage.java Tue 
Jan 27 02:27:45 2015
@@ -26,12 +26,16 @@ import java.io.File;
 import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.pig.ExecType;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
 import org.apache.pig.builtin.mock.Storage;
@@ -95,6 +99,13 @@ public class TestJsonLoaderStorage {
     "\"m\":[{\"a\":123},{\"a\":456},{\"a\":789}]" +
     "}";
 
+  private static final String arraysJson =
+    "{" +
+    "\"s\":[\"abc\",\"def\",\"ghi\"]," +
+    "\"i\":[23,45,78]," +
+    "\"f\":[23.1,45.2,78.3]" +
+    "}";
+
   private static final String nullJson =
     "{" +
     "\"a\":null," +
@@ -138,8 +149,8 @@ public class TestJsonLoaderStorage {
   private PigServer pigServer;
 
   @Before
-  public void setup() throws ExecException {
-    pigServer = new PigServer(ExecType.LOCAL);
+  public void setup() throws Exception {
+    pigServer = new PigServer(Util.getLocalTestMode());
   }
 
   private String getTempOutputPath() throws IOException {
@@ -176,7 +187,7 @@ public class TestJsonLoaderStorage {
     return pigServer.openIterator("data");
   }
 
-  private BufferedReader storeJson(String input) throws IOException {
+  private BufferedReader storeJson(String input) throws Exception {
     String pathInputFile = createInput(input);
     String pathJsonFile = getTempOutputPath();
     pigServer.registerQuery("data = load '" + pathInputFile
@@ -184,7 +195,10 @@ public class TestJsonLoaderStorage {
     pigServer.registerQuery("store data into '" + pathJsonFile
         + "' using JsonStorage();");
 
-    FileReader r = new FileReader(pathJsonFile + "/part-m-00000");
+    Path p = new Path(pathJsonFile);
+    FileSystem fs = FileSystem.get(p.toUri(), new Configuration());
+    Reader r = new InputStreamReader(fs.open(Util.getFirstPartFile(p)));
+
     BufferedReader br = new BufferedReader(r);
 
     return br;
@@ -264,6 +278,24 @@ public class TestJsonLoaderStorage {
 
   @SuppressWarnings("rawtypes")
   @Test
+  public void testJsonLoaderArrays() throws IOException{
+
+    String arraysJsonFile = createInput(arraysJson);
+    pigServer.registerQuery("data = load '" + arraysJsonFile + "' using 
JsonLoader('s:bag{a:tuple(a:chararray)}, i:bag{a:tuple(a:int)}, 
f:bag{a:tuple(a:double)}');");
+
+    Iterator<Tuple> tuples = pigServer.openIterator("data");
+    
+    Tuple t = tuples.next();
+    assertTrue(t.size()==3);
+    assertTrue(t.get(0)!=null);
+    assertTrue(t.get(1)!=null);
+    assertTrue(t.get(2)!=null);
+    assertTrue(! tuples.hasNext());
+
+  }
+
+  @SuppressWarnings("rawtypes")
+  @Test
   public void testJsonLoaderBigDecimalFormats() throws IOException{
 
     String bigDecimalJsonFile = createInput(bigDecimalJson);
@@ -307,7 +339,7 @@ public class TestJsonLoaderStorage {
   }
 
   @Test
-  public void testJsonStorage() throws IOException {
+  public void testJsonStorage() throws Exception {
     BufferedReader br = storeJson(rawInput);
     String data = br.readLine();
 
@@ -325,7 +357,7 @@ public class TestJsonLoaderStorage {
   }
 
   @Test
-  public void testJsonStorageNull() throws IOException {
+  public void testJsonStorageNull() throws Exception {
     BufferedReader br = storeJson(nullInput);
     String data = br.readLine();
 
@@ -343,7 +375,7 @@ public class TestJsonLoaderStorage {
   }
 
   @Test
-  public void testJsonLoaderStorage() throws IOException {
+  public void testJsonLoaderStorage() throws Exception {
 
     String pattInputFile = createInput(rawInput);
     String pattJsonFile = getTempOutputPath();
@@ -358,7 +390,9 @@ public class TestJsonLoaderStorage {
     pigServer.registerQuery("store json into '" + pattJson2File
         + "' using JsonStorage();");
 
-    FileReader r = new FileReader(pattJson2File + "/part-m-00000");
+    Path p = new Path(pattJson2File);
+    FileSystem fs = FileSystem.get(p.toUri(), new Configuration());
+    Reader r = new InputStreamReader(fs.open(Util.getFirstPartFile(p)));
 
     BufferedReader br = new BufferedReader(r);
     String data = br.readLine();
@@ -386,7 +420,9 @@ public class TestJsonLoaderStorage {
     pigServer.registerQuery("data = limit data 2;");
     pigServer.registerQuery("store data into '" + outPath + "' using 
JsonStorage();");
 
-    FileReader r = new FileReader(outPath + "/part-r-00000");
+    Path p = new Path(outPath);
+    FileSystem fs = FileSystem.get(p.toUri(), new Configuration());
+    Reader r = new InputStreamReader(fs.open(Util.getFirstPartFile(p)));
 
     BufferedReader br = new BufferedReader(r);
 
@@ -432,7 +468,11 @@ public class TestJsonLoaderStorage {
             "};");
     pigServer.store("uniqcnt", tempJsonFile.getAbsolutePath(), "JsonStorage");
 
-    BufferedReader br = new BufferedReader(new 
FileReader(tempJsonFile.getAbsolutePath()+ "/part-r-00000"));
+    Path p = new Path(tempJsonFile.getAbsolutePath());
+    FileSystem fs = FileSystem.get(p.toUri(), new Configuration());
+    Reader r = new InputStreamReader(fs.open(Util.getFirstPartFile(p)));
+
+    BufferedReader br = new BufferedReader(r);
     String data = br.readLine();
 
     assertEquals(jsonOutput, data);

Modified: 
pig/branches/spark/test/org/apache/pig/test/TestLOLoadDeterminedSchema.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestLOLoadDeterminedSchema.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestLOLoadDeterminedSchema.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestLOLoadDeterminedSchema.java 
Tue Jan 27 02:27:45 2015
@@ -25,7 +25,6 @@ import java.io.IOException;
 import java.util.Properties;
 
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.pig.ExecType;
 import org.apache.pig.FuncSpec;
 import org.apache.pig.PigServer;
 import org.apache.pig.impl.PigContext;
@@ -86,7 +85,7 @@ public class TestLOLoadDeterminedSchema
     @Before
     public void setUp() throws Exception {
         FileLocalizer.deleteTempFiles();
-        server = new PigServer(ExecType.LOCAL, new Properties());
+        server = new PigServer(Util.getLocalTestMode(), new Properties());
 
         baseDir = new File("build/testLoLoadDeterminedSchema");
 

Modified: pig/branches/spark/test/org/apache/pig/test/TestLimitSchemaStore.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestLimitSchemaStore.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestLimitSchemaStore.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestLimitSchemaStore.java Tue 
Jan 27 02:27:45 2015
@@ -19,7 +19,6 @@ package org.apache.pig.test;
 
 import java.io.File;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.PigStorage;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
@@ -34,7 +33,7 @@ public class TestLimitSchemaStore{
 
     @Before
     public void setUp() throws Exception{
-        pigServer = new PigServer(ExecType.LOCAL);
+        pigServer = new PigServer(Util.getLocalTestMode());
     }
     
     

Modified: pig/branches/spark/test/org/apache/pig/test/TestLoad.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestLoad.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestLoad.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestLoad.java Tue Jan 27 
02:27:45 2015
@@ -33,7 +33,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.FuncSpec;
 import org.apache.pig.PigConfiguration;
 import org.apache.pig.PigServer;
@@ -73,7 +72,7 @@ public class TestLoad {
         FileLocalizer.deleteTempFiles();
         servers = new PigServer[] {
                     new PigServer(cluster.getExecType(), 
cluster.getProperties()),
-                    new PigServer(ExecType.LOCAL, new Properties())
+                    new PigServer(Util.getLocalTestMode(), new Properties())
         };
     }
 

Modified: pig/branches/spark/test/org/apache/pig/test/TestLocal.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestLocal.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestLocal.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestLocal.java Tue Jan 27 
02:27:45 2015
@@ -31,7 +31,6 @@ import java.util.Iterator;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.pig.EvalFunc;
-import org.apache.pig.ExecType;
 import org.apache.pig.FuncSpec;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -56,7 +55,7 @@ public class TestLocal {
 
     @Before
     public void setUp() throws Exception {
-        pig = new PigServer(ExecType.LOCAL);
+        pig = new PigServer(Util.getLocalTestMode());
     }
 
     @Test

Modified: pig/branches/spark/test/org/apache/pig/test/TestLocal2.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestLocal2.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestLocal2.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestLocal2.java Tue Jan 27 
02:27:45 2015
@@ -32,7 +32,6 @@ import java.util.Iterator;
 import junit.framework.Assert;
 
 import org.apache.pig.EvalFunc;
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.data.BagFactory;
 import org.apache.pig.data.DataBag;
@@ -45,7 +44,7 @@ public class TestLocal2 {
     private PigServer pig ;
 
     public TestLocal2() throws Throwable {
-        pig = new PigServer(ExecType.LOCAL) ;
+        pig = new PigServer(Util.getLocalTestMode()) ;
     }
 
 

Modified: pig/branches/spark/test/org/apache/pig/test/TestMacroExpansion.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestMacroExpansion.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestMacroExpansion.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestMacroExpansion.java Tue Jan 
27 02:27:45 2015
@@ -29,7 +29,6 @@ import java.io.PrintWriter;
 import java.io.StringReader;
 import java.util.Properties;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.PigRunner;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.parser.DryRunGruntParser;
@@ -1434,7 +1433,7 @@ public class TestMacroExpansion {
     @Test
     public void test2() throws Exception {
         String query = "A = load 'x' as ( u:int, v:long, w:bytearray); " + 
-                       "B = distinct A partition by org.apache.pig.Identity; " 
+
+                       "B = distinct A partition by 
org.apache.pig.test.utils.SimpleCustomPartitioner; " +
                        "C = sample B 0.49; " +
                        "D = order C by $0, $1; " +
                        "E = load 'y' as (d1, d2); " +
@@ -1446,7 +1445,7 @@ public class TestMacroExpansion {
         
         String expected =
             "macro_mymacro_A_0 = load 'x' as (u:int, v:long, w:bytearray);\n" +
-            "macro_mymacro_B_0 = distinct macro_mymacro_A_0 partition BY 
org.apache.pig.Identity;\n" +
+            "macro_mymacro_B_0 = distinct macro_mymacro_A_0 partition BY 
org.apache.pig.test.utils.SimpleCustomPartitioner;\n" +
             "macro_mymacro_C_0 = sample macro_mymacro_B_0 0.49;\n" +
             "macro_mymacro_D_0 = order macro_mymacro_C_0 BY $0, $1;\n" +
             "macro_mymacro_E_0 = load 'y' as (d1, d2);\n" + 
@@ -2277,7 +2276,8 @@ public class TestMacroExpansion {
     private void verify(String s, String expected) throws Exception {
         createFile("myscript.pig", s);
 
-        String[] args = { "-Dpig.import.search.path=/tmp", "-x", "local", 
"-c", "myscript.pig" };
+        String mode = Util.getLocalTestMode().toString();
+        String[] args = { "-Dpig.import.search.path=/tmp", "-x", mode, "-c", 
"myscript.pig" };
         PigStats stats = PigRunner.run(args, null);
         
         if (!stats.isSuccessful()) {
@@ -2286,7 +2286,7 @@ public class TestMacroExpansion {
         
         assertTrue(stats.isSuccessful());
         
-        String[] args2 = { "-Dpig.import.search.path=/tmp", "-x", "local", 
"-r", "myscript.pig" };
+        String[] args2 = { "-Dpig.import.search.path=/tmp", "-x", mode, "-r", 
"myscript.pig" };
         PigRunner.run(args2, null);
         
         File f2 = new File("myscript.pig.expanded");
@@ -2316,7 +2316,7 @@ public class TestMacroExpansion {
         
         try {
             BufferedReader br = new BufferedReader(new StringReader(piglatin));
-            Grunt grunt = new Grunt(br, new PigContext(ExecType.LOCAL, new 
Properties()));
+            Grunt grunt = new Grunt(br, new 
PigContext(Util.getLocalTestMode(), new Properties()));
             
             PrintWriter w = new PrintWriter(new FileWriter(scriptFile));
             w.print(piglatin);
@@ -2348,7 +2348,7 @@ public class TestMacroExpansion {
         try {
             BufferedReader br = new BufferedReader(new StringReader(piglatin));
             DryRunGruntParser parser = new DryRunGruntParser(br, scriptFile,
-                    new PigContext(ExecType.LOCAL, new Properties()));
+                    new PigContext(Util.getLocalTestMode(), new Properties()));
 
             PrintWriter w = new PrintWriter(new FileWriter(scriptFile));
             w.print(piglatin);

Modified: pig/branches/spark/test/org/apache/pig/test/TestMultiQuery.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestMultiQuery.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestMultiQuery.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestMultiQuery.java Tue Jan 27 
02:27:45 2015
@@ -25,7 +25,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.PigConfiguration;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecJob;
@@ -53,13 +52,13 @@ public class TestMultiQuery {
                 "test/org/apache/pig/test/data/passwd2", "passwd2");
         Properties props = new Properties();
         props.setProperty(PigConfiguration.PIG_OPT_MULTIQUERY, ""+true);
-        myPig = new PigServer(ExecType.LOCAL, props);
+        myPig = new PigServer(Util.getLocalTestMode(), props);
     }
 
     @AfterClass
     public static void tearDownAfterClass() throws Exception {
-        Util.deleteFile(new PigContext(ExecType.LOCAL, new Properties()), 
"passwd");
-        Util.deleteFile(new PigContext(ExecType.LOCAL, new Properties()), 
"passwd2");
+        Util.deleteFile(new PigContext(Util.getLocalTestMode(), new 
Properties()), "passwd");
+        Util.deleteFile(new PigContext(Util.getLocalTestMode(), new 
Properties()), "passwd2");
         deleteOutputFiles();
     }
 

Modified: pig/branches/spark/test/org/apache/pig/test/TestMultiQueryBasic.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestMultiQueryBasic.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestMultiQueryBasic.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestMultiQueryBasic.java Tue 
Jan 27 02:27:45 2015
@@ -38,7 +38,6 @@ import org.apache.hadoop.mapreduce.JobCo
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.pig.ExecType;
 import org.apache.pig.LoadFunc;
 import org.apache.pig.PigConfiguration;
 import org.apache.pig.PigServer;
@@ -70,13 +69,14 @@ public class TestMultiQueryBasic {
                 "test/org/apache/pig/test/data/passwd2", "passwd2");
         Properties props = new Properties();
         props.setProperty(PigConfiguration.PIG_OPT_MULTIQUERY, ""+true);
-        myPig = new PigServer(ExecType.LOCAL, props);
+        props.setProperty("tez.runtime.io.sort.mb", "10");
+        myPig = new PigServer(Util.getLocalTestMode(), props);
     }
 
     @AfterClass
     public static void tearDownAfterClass() throws Exception {
-        Util.deleteFile(new PigContext(ExecType.LOCAL, new Properties()), 
"passwd");
-        Util.deleteFile(new PigContext(ExecType.LOCAL, new Properties()), 
"passwd2");
+        Util.deleteFile(new PigContext(Util.getLocalTestMode(), new 
Properties()), "passwd");
+        Util.deleteFile(new PigContext(Util.getLocalTestMode(), new 
Properties()), "passwd2");
         deleteOutputFiles();
     }
 
@@ -438,7 +438,7 @@ public class TestMultiQueryBasic {
         // clean up any existing dirs/files
         String[] toClean = {"tmwsimam-input.txt", "foo1", "foo2", "foo3", 
"foo4" };
         for (int j = 0; j < toClean.length; j++) {
-            Util.deleteFile(new PigContext(ExecType.LOCAL, new Properties()), 
toClean[j]);
+            Util.deleteFile(new PigContext(Util.getLocalTestMode(), new 
Properties()), toClean[j]);
         }
 
         // the data below is tab delimited
@@ -504,7 +504,7 @@ public class TestMultiQueryBasic {
         }
         // cleanup
         for (int j = 0; j < toClean.length; j++) {
-            Util.deleteFile(new PigContext(ExecType.LOCAL, new Properties()), 
toClean[j]);
+            Util.deleteFile(new PigContext(Util.getLocalTestMode(), new 
Properties()), toClean[j]);
         }
 
     }
@@ -556,7 +556,7 @@ public class TestMultiQueryBasic {
      * @throws IOException
      */
     @Test
-    public void testMultiStoreWithOutputFormat() throws IOException {
+    public void testMultiStoreWithOutputFormat() throws Exception {
         Util.createLocalInputFile("input.txt", new String[] {"hello", "bye"});
         String query = "a = load 'input.txt';" +
                        "b = filter a by $0 < 10;" +
@@ -574,8 +574,8 @@ public class TestMultiQueryBasic {
         assertEquals(true, fs.exists(new 
Path("output1_checkOutputSpec_test")));
         assertEquals(true, fs.exists(new 
Path("output2_checkOutputSpec_test")));
 
-        Util.deleteFile(new PigContext(ExecType.LOCAL, new Properties()), 
"output1_checkOutputSpec_test");
-        Util.deleteFile(new PigContext(ExecType.LOCAL, new Properties()), 
"output2_checkOutputSpec_test");
+        Util.deleteFile(new PigContext(Util.getLocalTestMode(), new 
Properties()), "output1_checkOutputSpec_test");
+        Util.deleteFile(new PigContext(Util.getLocalTestMode(), new 
Properties()), "output2_checkOutputSpec_test");
     }
 
     /**

Modified: pig/branches/spark/test/org/apache/pig/test/TestMultiQueryLocal.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestMultiQueryLocal.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestMultiQueryLocal.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestMultiQueryLocal.java Tue 
Jan 27 02:27:45 2015
@@ -19,8 +19,8 @@ package org.apache.pig.test;
 
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -29,17 +29,19 @@ import java.util.Properties;
 
 import junit.framework.Assert;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.pig.ExecType;
 import org.apache.pig.PigConfiguration;
 import org.apache.pig.PigException;
 import org.apache.pig.PigServer;
+import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
+import org.apache.pig.backend.hadoop.executionengine.Launcher;
 import 
org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
-import 
org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRExecutionEngine;
-import 
org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher;
 import 
org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigTextOutputFormat;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
 import org.apache.pig.builtin.PigStorage;
@@ -56,17 +58,18 @@ import org.apache.pig.tools.pigscript.pa
 import org.apache.pig.tools.pigstats.JobStats;
 import org.apache.pig.tools.pigstats.PigStats;
 import org.junit.After;
+import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
 
 public class TestMultiQueryLocal {
 
-    private PigServer myPig;
+    protected PigServer myPig;
     private String TMP_DIR;
 
     @Before
     public void setUp() throws Exception {
-        PigContext context = new PigContext(ExecType.LOCAL, new Properties());
+        PigContext context = new PigContext(Util.getLocalTestMode(), new 
Properties());
         
context.getProperties().setProperty(PigConfiguration.PIG_OPT_MULTIQUERY, 
""+true);
         myPig = new PigServer(context);
         
myPig.getPigContext().getProperties().setProperty("pig.usenewlogicalplan", 
"false");
@@ -351,22 +354,33 @@ public class TestMultiQueryLocal {
 
     public static class PigStorageWithConfig extends PigStorage {
 
-        private static final String key = "test.key";
+        private static final String key1 = "test.key1";
+        private static final String key2 = "test.key2";
         private String suffix;
+        private String myKey;
 
-        public PigStorageWithConfig(String s) {
+        public PigStorageWithConfig(String key, String s) {
             this.suffix = s;
+            this.myKey = key;
         }
 
         @Override
         public void setStoreLocation(String location, Job job) throws 
IOException {
             super.setStoreLocation(location, job);
-            Assert.assertNull(job.getConfiguration().get(key));
+            if (myKey.equals(key1)) {
+                Assert.assertNull(job.getConfiguration().get(key2));
+            } else {
+                Assert.assertNull(job.getConfiguration().get(key1));
+            }
         }
 
         @Override
         public OutputFormat getOutputFormat() {
-            return new PigTextOutputFormatWithConfig();
+            if (myKey.equals(key1)) {
+                return new PigTextOutputFormatWithConfig1();
+            } else {
+                return new PigTextOutputFormatWithConfig2();
+            }
         }
 
         @Override
@@ -384,16 +398,30 @@ public class TestMultiQueryLocal {
         }
     }
 
-    private static class PigTextOutputFormatWithConfig extends 
PigTextOutputFormat {
+    private static class PigTextOutputFormatWithConfig1 extends 
PigTextOutputFormat {
+
+        public PigTextOutputFormatWithConfig1() {
+            super((byte) '\t');
+        }
+
+        @Override
+        public synchronized OutputCommitter 
getOutputCommitter(TaskAttemptContext context)
+                throws IOException {
+            context.getConfiguration().set(PigStorageWithConfig.key1, 
MRConfiguration.WORK_OUPUT_DIR);
+            return super.getOutputCommitter(context);
+        }
+    }
+
+    private static class PigTextOutputFormatWithConfig2 extends 
PigTextOutputFormat {
 
-        public PigTextOutputFormatWithConfig() {
+        public PigTextOutputFormatWithConfig2() {
             super((byte) '\t');
         }
 
         @Override
         public synchronized OutputCommitter 
getOutputCommitter(TaskAttemptContext context)
                 throws IOException {
-            context.getConfiguration().set(PigStorageWithConfig.key, 
MRConfiguration.WORK_OUPUT_DIR);
+            context.getConfiguration().set(PigStorageWithConfig.key2, 
MRConfiguration.WORK_OUPUT_DIR);
             return super.getOutputCommitter(context);
         }
     }
@@ -411,17 +439,20 @@ public class TestMultiQueryLocal {
                                 "using PigStorage(':') as (uname:chararray, 
passwd:chararray, uid:int,gid:int);");
             myPig.registerQuery("b = filter a by uid < 5;");
             myPig.registerQuery("c = filter a by uid > 5;");
-            myPig.registerQuery("store b into '" + TMP_DIR + 
"/Pig-TestMultiQueryLocal1' using " + PigStorageWithConfig.class.getName() + 
"('a');");
-            myPig.registerQuery("store c into '" + TMP_DIR + 
"/Pig-TestMultiQueryLocal2' using " + PigStorageWithConfig.class.getName() + 
"('b');");
+            myPig.registerQuery("store b into '" + TMP_DIR + 
"/Pig-TestMultiQueryLocal1' using " + PigStorageWithConfig.class.getName() + 
"('test.key1', 'a');");
+            myPig.registerQuery("store c into '" + TMP_DIR + 
"/Pig-TestMultiQueryLocal2' using " + PigStorageWithConfig.class.getName() + 
"('test.key2', 'b');");
 
             myPig.executeBatch();
             myPig.discardBatch();
-            BufferedReader reader = new BufferedReader(new FileReader(TMP_DIR 
+ "/Pig-TestMultiQueryLocal1/part-m-00000"));
+            FileSystem fs = FileSystem.getLocal(new Configuration());
+            BufferedReader reader = new BufferedReader(new InputStreamReader
+                    (fs.open(Util.getFirstPartFile(new Path(TMP_DIR + 
"/Pig-TestMultiQueryLocal1")))));
             String line;
             while ((line = reader.readLine())!=null) {
                 Assert.assertTrue(line.endsWith("a"));
             }
-            reader = new BufferedReader(new FileReader(TMP_DIR + 
"/Pig-TestMultiQueryLocal2/part-m-00000"));
+            reader = new BufferedReader(new InputStreamReader
+                    (fs.open(Util.getFirstPartFile(new Path(TMP_DIR + 
"/Pig-TestMultiQueryLocal2")))));
             while ((line = reader.readLine())!=null) {
                 Assert.assertTrue(line.endsWith("b"));
             }
@@ -505,8 +536,9 @@ public class TestMultiQueryLocal {
     }
 
     @Test
-    public void testMultiQueryWithIllustrate() {
+    public void testMultiQueryWithIllustrate() throws Exception {
 
+        Assume.assumeTrue("illustrate does not work in tez (PIG-3993)", 
!Util.getLocalTestMode().toString().startsWith("TEZ"));
         System.out.println("===== test multi-query with illustrate =====");
 
         try {
@@ -626,7 +658,7 @@ public class TestMultiQueryLocal {
         lp.optimize(myPig.getPigContext());
         System.out.println("===== check physical plan =====");        
 
-        PhysicalPlan pp = 
((MRExecutionEngine)myPig.getPigContext().getExecutionEngine()).compile(
+        PhysicalPlan pp = 
((HExecutionEngine)myPig.getPigContext().getExecutionEngine()).compile(
                 lp, null);
 
         Assert.assertEquals(expectedRoots, pp.getRoots().size());
@@ -638,9 +670,9 @@ public class TestMultiQueryLocal {
         return pp;
     }
 
-    private boolean executePlan(PhysicalPlan pp) throws IOException {
+    protected boolean executePlan(PhysicalPlan pp) throws IOException {
         boolean failed = true;
-        MapReduceLauncher launcher = new MapReduceLauncher();
+        Launcher launcher = MiniGenericCluster.getLauncher();
         PigStats stats = null;
         try {
             stats = launcher.launchPig(pp, "execute", myPig.getPigContext());

Modified: pig/branches/spark/test/org/apache/pig/test/TestNestedForeach.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestNestedForeach.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestNestedForeach.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestNestedForeach.java Tue Jan 
27 02:27:45 2015
@@ -159,11 +159,10 @@ public class TestNestedForeach {
         " }\n");
 
         Iterator<Tuple> iter = pig.openIterator("c");
-        Tuple t = iter.next();
-        Assert.assertTrue(t.toString().equals("({(3)})"));
-
-        t = iter.next();
-        Assert.assertTrue(t.toString().equals("({(7)})"));
+        String[] expected = new String[] {
+                "({(3)})", "({(7)})" };
+        Util.checkQueryOutputsAfterSortRecursive(iter, expected,
+                
org.apache.pig.newplan.logical.Util.translateSchema(pig.dumpSchema("c")));
     }
 
     @Test
@@ -185,11 +184,11 @@ public class TestNestedForeach {
         " }\n");
 
         Iterator<Tuple> iter = pig.openIterator("c");
-        Tuple t = iter.next();
-        Assert.assertTrue(t.toString().equals("({(2),(3)})"));
+        String[] expected = new String[] {
+                "({(2),(3)})", "({(7)})" };
+        Util.checkQueryOutputsAfterSortRecursive(iter, expected,
+                
org.apache.pig.newplan.logical.Util.translateSchema(pig.dumpSchema("c")));
 
-        t = iter.next();
-        Assert.assertTrue(t.toString().equals("({(7)})"));
     }
 
     // See PIG-2563
@@ -207,10 +206,9 @@ public class TestNestedForeach {
         pig.registerQuery("C = foreach B {tmp = A.a;generate A, tmp; };");
         pig.registerQuery("D = foreach C generate A.(a,b) as v;");
         Iterator<Tuple> iter = pig.openIterator("D");
-        Tuple t = iter.next();
-        Assert.assertTrue(t.toString().equals("({(1,2)})"));
-
-        t = iter.next();
-        Assert.assertTrue(t.toString().equals("({(2,5)})"));
+        String[] expected = new String[] {
+                "({(1,2)})", "({(2,5)})" };
+        Util.checkQueryOutputsAfterSortRecursive(iter, expected,
+                
org.apache.pig.newplan.logical.Util.translateSchema(pig.dumpSchema("D")));
     }
 }

Modified: 
pig/branches/spark/test/org/apache/pig/test/TestNewPlanColumnPrune.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestNewPlanColumnPrune.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestNewPlanColumnPrune.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestNewPlanColumnPrune.java Tue 
Jan 27 02:27:45 2015
@@ -32,7 +32,6 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.newplan.Operator;
@@ -46,12 +45,17 @@ import org.apache.pig.newplan.logical.ru
 import org.apache.pig.newplan.logical.rules.MapKeysPruneHelper;
 import org.apache.pig.newplan.optimizer.PlanOptimizer;
 import org.apache.pig.newplan.optimizer.Rule;
+import org.junit.Before;
 import org.junit.Test;
 
 public class TestNewPlanColumnPrune {
     LogicalPlan plan = null;
-    PigContext pc = new PigContext(ExecType.LOCAL, new Properties());
+    PigContext pc;
 
+    @Before
+    public void setUp() throws Exception {
+        pc = new PigContext(Util.getLocalTestMode(), new Properties());
+    }
     private LogicalPlan buildPlan(String query) throws Exception{
         PigServer pigServer = new PigServer( pc );
         return Util.buildLp(pigServer, query);

Modified: pig/branches/spark/test/org/apache/pig/test/TestNullConstant.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestNullConstant.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestNullConstant.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestNullConstant.java Tue Jan 
27 02:27:45 2015
@@ -27,7 +27,6 @@ import java.io.IOException;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
 import org.apache.pig.builtin.mock.Storage.Data;
@@ -43,7 +42,7 @@ public class TestNullConstant {
 
     @Before
     public void setUp() throws Exception{
-        pigServer = new PigServer(ExecType.LOCAL);
+        pigServer = new PigServer(Util.getLocalTestMode());
     }
 
     @Test

Modified: pig/branches/spark/test/org/apache/pig/test/TestOrderBy3.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestOrderBy3.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestOrderBy3.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestOrderBy3.java Tue Jan 27 
02:27:45 2015
@@ -29,7 +29,6 @@ import java.util.Random;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.mock.Storage.Data;
 import org.apache.pig.data.DataType;
@@ -53,7 +52,7 @@ public class TestOrderBy3 {
 
         log.info("Setting up");
 
-        pigServer = new PigServer(ExecType.LOCAL);
+        pigServer = new PigServer(Util.getLocalTestMode());
         data = resetData(pigServer);
 
         Random r = new Random();

Modified: pig/branches/spark/test/org/apache/pig/test/TestPONegative.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestPONegative.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestPONegative.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestPONegative.java Tue Jan 27 
02:27:45 2015
@@ -25,7 +25,6 @@ import java.util.Iterator;
 import java.util.Properties;
 import java.util.Random;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.PONegative;
@@ -293,7 +292,7 @@ public class TestPONegative {
 
     @Test
     public void testPONegType() throws Exception {
-        PigServer pig = new PigServer(ExecType.LOCAL, new Properties());
+        PigServer pig = new PigServer(Util.getLocalTestMode(), new 
Properties());
         File f = Util.createInputFile("tmp", "", new String[] {"a", "b", "c"});
         pig.registerQuery("a = load '"
                 + Util.encodeEscape(Util.generateURI(f.toString(), 
pig.getPigContext())) + "';");

Modified: pig/branches/spark/test/org/apache/pig/test/TestPOPartialAggPlan.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestPOPartialAggPlan.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestPOPartialAggPlan.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestPOPartialAggPlan.java Tue 
Jan 27 02:27:45 2015
@@ -23,28 +23,28 @@ import static org.junit.Assert.assertNul
 
 import java.util.Iterator;
 
-import org.apache.pig.ExecType;
 import org.apache.pig.PigConfiguration;
 import org.apache.pig.PigServer;
-import org.apache.pig.backend.executionengine.ExecException;
 import 
org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPartialAgg;
 import org.apache.pig.impl.PigContext;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 
 /**
  * Test POPartialAgg runtime
  */
+@Ignore
 public class TestPOPartialAggPlan  {
-    private static PigContext pc;
-    private static PigServer ps;
+    protected static PigContext pc;
+    protected static PigServer ps;
 
     @Before
-    public void setUp() throws ExecException {
-        ps = new PigServer(ExecType.LOCAL);
+    public void setUp() throws Exception {
+        ps = new PigServer(Util.getLocalTestMode());
         pc = ps.getPigContext();
         pc.connect();
     }
@@ -89,7 +89,7 @@ public class TestPOPartialAggPlan  {
         return findPOPartialAgg(mapPlan);
     }
 
-    private String getGByQuery() {
+    protected String getGByQuery() {
         return "l = load 'x' as (a,b,c);" +
                 "g = group l by a;" +
                 "f = foreach g generate group, COUNT(l.b);";
@@ -122,8 +122,8 @@ public class TestPOPartialAggPlan  {
         assertNull("POPartialAgg should be absent", findPOPartialAgg(mrp));
     }
 
-    private PhysicalOperator findPOPartialAgg(PhysicalPlan mapPlan) {
-        Iterator<PhysicalOperator> it = mapPlan.iterator();
+    protected PhysicalOperator findPOPartialAgg(PhysicalPlan plan) {
+        Iterator<PhysicalOperator> it = plan.iterator();
         while(it.hasNext()){
             PhysicalOperator op = it.next();
             if(op instanceof POPartialAgg){
@@ -132,7 +132,4 @@ public class TestPOPartialAggPlan  {
         }
         return null;
     }
-
-
-
 }

Modified: pig/branches/spark/test/org/apache/pig/test/TestParamSubPreproc.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestParamSubPreproc.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestParamSubPreproc.java 
(original)
+++ pig/branches/spark/test/org/apache/pig/test/TestParamSubPreproc.java Tue 
Jan 27 02:27:45 2015
@@ -1356,12 +1356,39 @@ public class TestParamSubPreproc {
         File outputFile = File.createTempFile("tmp", "");
         outputFile.delete();
         PigContext pc = new PigContext(ExecType.LOCAL, new Properties());
-        String command = "a = load '" + Util.generateURI(inputFile.toString(), 
pc)  + "' as ($param1:chararray, $param2:int);\n"
-                + "store a into '" + Util.generateURI(outputFile.toString(), 
pc) + "';\n"
+        String command = "%default agelimit `echo 15`\n"
+                + "rmf $outputFile;\n"
+                + "a = load '" + Util.generateURI(inputFile.toString(), pc)  + 
"' as ($param1:chararray, $param2:int);\n"
+                + "b = filter a by age > $agelimit;"
+                + "store b into '$outputFile';\n"
                 + "quit\n";
         System.setProperty("jline.WindowsTerminal.directConsole", "false");
         System.setIn(new ByteArrayInputStream(command.getBytes()));
-        org.apache.pig.PigRunner.run(new String[] {"-x", "local", "-p", 
"param1=name", "-p", "param2=age"}, null);
+        org.apache.pig.PigRunner.run(new String[] {"-x", "local", "-p", 
"param1=name", "-p", "param2=age", "-p", "outputFile=" + 
Util.generateURI(outputFile.toString(), pc)}, null);
+        File[] partFiles = outputFile.listFiles(new FilenameFilter() {
+            public boolean accept(File dir, String name) { 
+            return name.startsWith("part");
+        }
+        });
+        String resultContent = Util.readFile(partFiles[0]);
+        assertEquals(resultContent, "jenny\t20\n");
+    }
+
+    @Test
+    public void testGruntMultilineDefine() throws Exception{
+        log.info("Starting test testGruntMultilineDefine()");
+        File inputFile = Util.createFile(new 
String[]{"daniel\t10","jenny\t20"});
+        File outputFile = File.createTempFile("tmp", "");
+        outputFile.delete();
+        PigContext pc = new PigContext(ExecType.LOCAL, new Properties());
+        String command = "DEFINE process(input_file) returns data {\n" +
+                "$data = load '$input_file' using PigStorage(',');};\n" +
+                "b = process('" + Util.generateURI(inputFile.toString(), pc)  
+ "');\n" +
+                "store b into '" + Util.generateURI(outputFile.toString(), pc) 
+ "';" +
+                "quit\n";
+        System.setProperty("jline.WindowsTerminal.directConsole", "false");
+        System.setIn(new ByteArrayInputStream(command.getBytes()));
+        org.apache.pig.PigRunner.run(new String[] {"-x", "local"}, null);
         File[] partFiles = outputFile.listFiles(new FilenameFilter() {
             public boolean accept(File dir, String name) { 
             return name.startsWith("part");

Modified: pig/branches/spark/test/org/apache/pig/test/TestParser.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestParser.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestParser.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestParser.java Tue Jan 27 
02:27:45 2015
@@ -16,7 +16,6 @@
 
 package org.apache.pig.test;
 
-import static org.apache.pig.ExecType.LOCAL;
 import static org.apache.pig.builtin.mock.Storage.resetData;
 import static org.apache.pig.builtin.mock.Storage.tuple;
 import static org.junit.Assert.assertEquals;
@@ -32,7 +31,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
-import org.apache.pig.backend.executionengine.ExecException;
 import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
 import 
org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
 import org.apache.pig.builtin.mock.Storage;
@@ -64,17 +62,17 @@ public class TestParser {
             cluster.shutDown();
     }
 
-    public void setUp(ExecType execType) throws ExecException{
+    public void setUp(ExecType execType) throws Exception{
         Util.resetStateForExecModeSwitch();
         if (execType == cluster.getExecType()) {
             pigServer = new PigServer(cluster.getExecType(), 
cluster.getProperties());
         } else {
-            pigServer = new PigServer(LOCAL);
+            pigServer = new PigServer(Util.getLocalTestMode());
         }
     }
 
     @Test(expected = IOException.class)
-    public void testLoadingNonexistentFile() throws ExecException, IOException 
{
+    public void testLoadingNonexistentFile() throws Exception {
         for (ExecType execType : execTypes) {
             setUp(execType);
             pigServer.registerQuery("vals = load 'nonexistentfile';");
@@ -83,7 +81,7 @@ public class TestParser {
     }
 
     @Test
-    public void testRemoteServerList() throws ExecException, IOException {
+    public void testRemoteServerList() throws Exception {
         for (ExecType execType : execTypes) {
             setUp(execType);
             Properties pigProperties = 
pigServer.getPigContext().getProperties();
@@ -129,8 +127,8 @@ public class TestParser {
     }
 
     @Test
-    public void testRemoteServerList2() throws ExecException, IOException {
-        pigServer = new PigServer(LOCAL);
+    public void testRemoteServerList2() throws Exception {
+        pigServer = new PigServer(Util.getLocalTestMode());
         Properties pigProperties = pigServer.getPigContext().getProperties();
         pigProperties.setProperty("fs.default.name", "hdfs://a.com:8020");
         Configuration conf;
@@ -175,7 +173,7 @@ public class TestParser {
 
     @Test
     public void testRestrictedColumnNamesWhitelist() throws Exception {
-        pigServer = new PigServer(LOCAL);
+        pigServer = new PigServer(Util.getLocalTestMode());
         Data data = resetData(pigServer);
 
         Set<Tuple> tuples = Sets.newHashSet(tuple(1),tuple(2),tuple(3));

Modified: pig/branches/spark/test/org/apache/pig/test/TestPigContext.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestPigContext.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestPigContext.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestPigContext.java Tue Jan 27 
02:27:45 2015
@@ -28,18 +28,30 @@ import java.util.Properties;
 import java.util.Random;
 
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import 
org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.impl.io.FileLocalizer;
 import org.apache.pig.impl.util.JavaCompilerHelper;
+import org.apache.pig.test.junit.OrderedJUnit4Runner;
+import org.apache.pig.test.junit.OrderedJUnit4Runner.TestOrder;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.runner.RunWith;
 
+//Need to run testImportList first due to TEZ-1802
+@RunWith(OrderedJUnit4Runner.class)
+@TestOrder({
+    "testImportList",
+    "testScriptFiles",
+    "testSetProperties_way_num01",
+    "testSetProperties_way_num02",
+    "testSetProperties_way_num03",
+    "testHadoopExceptionCreation"
+})
 public class TestPigContext {
     private static final String TMP_DIR_PROP = "/tmp/hadoop-hadoop";
     private static final String FS_NAME = "file:///";
@@ -60,7 +72,7 @@ public class TestPigContext {
     @Before
     public void setUp() throws Exception {
         Util.resetStateForExecModeSwitch();
-        pigContext = new PigContext(ExecType.LOCAL, getProperties());
+        pigContext = new PigContext(Util.getLocalTestMode(), getProperties());
         input = File.createTempFile("PigContextTest-", ".txt");
     }
 
@@ -85,7 +97,7 @@ public class TestPigContext {
      */
     @Test
     public void testSetProperties_way_num02() throws Exception {
-        PigServer pigServer = new PigServer(ExecType.LOCAL, getProperties());
+        PigServer pigServer = new PigServer(Util.getLocalTestMode(), 
getProperties());
         registerAndStore(pigServer);
 
         check_asserts(pigServer);
@@ -204,7 +216,7 @@ public class TestPigContext {
     @SuppressWarnings("deprecation")
     @Test
     public void testScriptFiles() throws Exception {
-        PigContext pc = new PigContext(ExecType.LOCAL, getProperties());
+        PigContext pc = new PigContext(Util.getLocalTestMode(), 
getProperties());
         final int n = pc.scriptFiles.size();
         pc.addScriptFile("test/path-1824");
         assertEquals("test" + File.separator + "path-1824", 
pc.getScriptFiles().get("test/path-1824").toString());

Modified: pig/branches/spark/test/org/apache/pig/test/TestPigRunner.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestPigRunner.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestPigRunner.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestPigRunner.java Tue Jan 27 
02:27:45 2015
@@ -196,10 +196,14 @@ public class TestPigRunner {
             String[] args = { "-Dstop.on.failure=true", 
"-Dopt.multiquery=false", "-Daggregate.warning=false", "-x", execType, PIG_FILE 
};
             PigStats stats = PigRunner.run(args, new 
TestNotificationListener(execType));
 
-            assertTrue(stats instanceof EmptyPigStats);
             assertTrue(stats.isSuccessful());
-            assertEquals(0, stats.getNumberJobs());
-            assertEquals(stats.getJobGraph().size(), 0);
+            if (execType.toString().startsWith("tez")) {
+                assertEquals(1, stats.getNumberJobs());
+                assertEquals(stats.getJobGraph().size(), 1);
+            } else {
+                assertEquals(2, stats.getNumberJobs());
+                assertEquals(stats.getJobGraph().size(), 2);
+            }
 
             Configuration conf = 
ConfigurationUtil.toConfiguration(stats.getPigProperties());
             assertTrue(conf.getBoolean("stop.on.failure", false));
@@ -513,7 +517,7 @@ public class TestPigRunner {
 
     @Test
     public void testIsTempFile() throws Exception {
-        PigContext context = new PigContext(ExecType.LOCAL, new Properties());
+        PigContext context = new PigContext(Util.getLocalTestMode(), new 
Properties());
         context.connect();
         for (int i=0; i<100; i++) {
             String file = FileLocalizer.getTemporaryPath(context).toString();


Reply via email to