Author: olga
Date: Wed Jun  9 18:37:54 2010
New Revision: 953109

URL: http://svn.apache.org/viewvc?rev=953109&view=rev
Log:
PIG-1441: new test targets (olgan)

Added:
    hadoop/pig/trunk/test/org/apache/pig/test/TestJoinSmoke.java
    hadoop/pig/trunk/test/smoke-tests
    hadoop/pig/trunk/test/unit-tests
Modified:
    hadoop/pig/trunk/CHANGES.txt
    hadoop/pig/trunk/build.xml

Modified: hadoop/pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/CHANGES.txt?rev=953109&r1=953108&r2=953109&view=diff
==============================================================================
--- hadoop/pig/trunk/CHANGES.txt (original)
+++ hadoop/pig/trunk/CHANGES.txt Wed Jun  9 18:37:54 2010
@@ -24,6 +24,8 @@ INCOMPATIBLE CHANGES
 
 IMPROVEMENTS
 
+PIG-1441: new test targets (olgan)
+
 PIG-282: Custom Partitioner (aniket486 via daijy)
 
 PIG-283: Allow to set arbitrary jobconf key-value pairs inside pig program (hashutosh)

Modified: hadoop/pig/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/build.xml?rev=953109&r1=953108&r2=953109&view=diff
==============================================================================
--- hadoop/pig/trunk/build.xml (original)
+++ hadoop/pig/trunk/build.xml Wed Jun  9 18:37:54 2010
@@ -80,6 +80,8 @@
     <property name="test.timeout" value="2700000" />
     <property name="test.junit.output.format" value="plain" />
     <property name="test.commit.file" value="${test.src.dir}/commit-tests"/>
+    <property name="test.unit.file" value="${test.src.dir}/unit-tests"/>
+    <property name="test.smoke.file" value="${test.src.dir}/smoke-tests"/>
     <property name="test.all.file" value="${test.src.dir}/all-tests"/>
 
 
@@ -536,6 +538,14 @@
         <macro-test-runner test.file="${test.commit.file}" />
     </target>
 
+    <target name="test-unit" depends="compile-test,jar-withouthadoop" 
description="Run all true unit tests">
+        <macro-test-runner test.file="${test.unit.file}" />
+    </target>
+
+    <target name="test-smoke" depends="compile-test,jar-withouthadoop" 
description="Run 30 min smoke tests">
+        <macro-test-runner test.file="${test.smoke.file}" />
+    </target>
+
     <macrodef name="macro-test-runner">
       <attribute name="test.file" />
       <sequential>
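
With these targets in place, the two new suites should be runnable the same
way as the existing commit-test target, e.g. from the top of the tree:

    ant test-unit
    ant test-smoke

Each target hands its list file (test/unit-tests or test/smoke-tests) to
macro-test-runner through the test.unit.file and test.smoke.file properties
added above.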

Added: hadoop/pig/trunk/test/org/apache/pig/test/TestJoinSmoke.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/test/org/apache/pig/test/TestJoinSmoke.java?rev=953109&view=auto
==============================================================================
--- hadoop/pig/trunk/test/org/apache/pig/test/TestJoinSmoke.java (added)
+++ hadoop/pig/trunk/test/org/apache/pig/test/TestJoinSmoke.java Wed Jun  9 18:37:54 2010
@@ -0,0 +1,257 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.pig.test;
+
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.PrintWriter;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.pig.EvalFunc;
+import org.apache.pig.ExecType;
+import org.apache.pig.FuncSpec;
+import org.apache.pig.LoadFunc;
+import org.apache.pig.PigServer;
+import org.apache.pig.backend.executionengine.ExecException;
+import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapReduce;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
+import org.apache.pig.builtin.PigStorage;
+import org.apache.pig.data.BagFactory;
+import org.apache.pig.data.DataBag;
+import org.apache.pig.data.DataByteArray;
+import org.apache.pig.data.Tuple;
+import org.apache.pig.data.TupleFactory;
+import org.apache.pig.impl.PigContext;
+import org.apache.pig.impl.io.FileSpec;
+import org.apache.pig.impl.logicalLayer.schema.Schema;
+import org.apache.pig.impl.plan.OperatorKey;
+import org.apache.pig.test.utils.TestHelper;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class TestJoinSmoke extends TestCase {
+    private static final String FR_INPUT_FILE = "testFrJoinInput.txt";
+
+    private static final String SKEW_INPUT_FILE1 = "SkewedJoinInput1.txt";
+    private static final String SKEW_INPUT_FILE2 = "SkewedJoinInput2.txt";
+    private static final String SKEW_INPUT_FILE5 = "SkewedJoinInput5.txt";
+
+    private PigServer pigServer;
+    private static MiniCluster cluster = MiniCluster.buildCluster();
+    private File tmpFile;
+    
+    public TestJoinSmoke() throws ExecException, IOException{
+        pigServer = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
+//        pigServer = new PigServer(ExecType.LOCAL);
+        
+    }
+    
+    @Before
+    public void setUp() throws Exception {
+        setupFRJoin();
+        setupSkewJoin();
+    }
+
+    private void setupFRJoin() throws Exception {
+        int LOOP_SIZE = 2;
+        String[] input = new String[2*LOOP_SIZE];
+        int k = 0;
+        for(int i = 1; i <= LOOP_SIZE; i++) {
+            String si = i + "";
+            for(int j=1;j<=LOOP_SIZE;j++)
+                input[k++] = si + "\t" + j;
+        }
+        Util.createInputFile(cluster, FR_INPUT_FILE, input);
+    }
+
+    private void setupSkewJoin() throws IOException {
+        PrintWriter w = new PrintWriter(new FileWriter(SKEW_INPUT_FILE1));
+
+        int k = 0;
+        for(int j=0; j<120; j++) {
+            w.println("100\tapple1\taaa" + k);
+            k++;
+            w.println("200\torange1\tbbb" + k);
+            k++;
+            w.println("300\tstrawberry\tccc" + k);
+            k++;
+        }
+
+        w.close();
+
+        PrintWriter w2 = new PrintWriter(new FileWriter(SKEW_INPUT_FILE2));
+        w2.println("100\tapple1");
+        w2.println("100\tapple2");
+        w2.println("100\tapple2");
+        w2.println("200\torange1");
+        w2.println("200\torange2");
+        w2.println("300\tstrawberry");
+        w2.println("400\tpear");
+
+        w2.close();
+
+        // Create a file with null keys
+        PrintWriter w5 = new PrintWriter(new FileWriter(SKEW_INPUT_FILE5));
+        for(int i=0; i < 10; i++) {
+            w5.println("\tapple1");
+        }
+        w5.println("100\tapple2");
+        for(int i=0; i < 10; i++) {
+            w5.println("\torange1");
+        }
+        w5.println("\t");
+        w5.println("100\t");
+        w5.close();
+
+        Util.copyFromLocalToCluster(cluster,SKEW_INPUT_FILE1,SKEW_INPUT_FILE1);
+        Util.copyFromLocalToCluster(cluster,SKEW_INPUT_FILE2,SKEW_INPUT_FILE2);
+        Util.copyFromLocalToCluster(cluster,SKEW_INPUT_FILE5,SKEW_INPUT_FILE5);
+    }
+
+    @AfterClass
+    public static void oneTimeTearDown() throws Exception {
+        cluster.shutDown();
+    }
+    
+    @After
+    public void tearDown() throws Exception {
+        Util.deleteFile(cluster, FR_INPUT_FILE);
+        tearDownSkewJoin();
+    }
+
+    private void tearDownSkewJoin() throws Exception {
+        new File(SKEW_INPUT_FILE1).delete();
+        new File(SKEW_INPUT_FILE2).delete();
+        new File(SKEW_INPUT_FILE5).delete();
+        Util.deleteDirectory(new File("skewedjoin"));
+
+        Util.deleteFile(cluster,SKEW_INPUT_FILE1);
+        Util.deleteFile(cluster,SKEW_INPUT_FILE2);
+        Util.deleteFile(cluster,SKEW_INPUT_FILE5);
+    }
+
+    @Test
+    public void testFRJoin() throws IOException{
+        pigServer.registerQuery("A = LOAD '" + FR_INPUT_FILE + "' as 
(x:int,y:int);");
+        pigServer.registerQuery("B = LOAD '" + FR_INPUT_FILE + "' as 
(x:int,y:int);");
+        DataBag dbfrj = BagFactory.getInstance().newDefaultBag(), dbshj = BagFactory.getInstance().newDefaultBag();
+        {
+            pigServer.registerQuery("C = join A by $0, B by $0 using 
\"replicated\";");
+            Iterator<Tuple> iter = pigServer.openIterator("C");
+            
+            while(iter.hasNext()) {
+                dbfrj.add(iter.next());
+            }
+        }
+        {
+            pigServer.registerQuery("C = join A by $0, B by $0;");
+            Iterator<Tuple> iter = pigServer.openIterator("C");
+            
+            while(iter.hasNext()) {
+                dbshj.add(iter.next());
+            }
+        }
+        Assert.assertTrue(dbfrj.size()>0 && dbshj.size()>0);
+        Assert.assertEquals(true, TestHelper.compareBags(dbfrj, dbshj));
+    }
+
+    @Test
+    public void testSkewedJoinWithGroup() throws IOException{
+        pigServer.registerQuery("A = LOAD '" + SKEW_INPUT_FILE1 + "' as (id, 
name, n);");
+        pigServer.registerQuery("B = LOAD '" + SKEW_INPUT_FILE2 + "' as (id, 
name);");
+        pigServer.registerQuery("C = GROUP A by id;");
+        pigServer.registerQuery("D = GROUP B by id;");
+
+        DataBag dbfrj = BagFactory.getInstance().newDefaultBag(), dbshj = BagFactory.getInstance().newDefaultBag();
+        {
+            pigServer.registerQuery("E = join C by group, D by group using 
\"skewed\" parallel 5;");
+            Iterator<Tuple> iter = pigServer.openIterator("E");
+
+            while(iter.hasNext()) {
+                dbfrj.add(iter.next());
+            }
+        }
+        {
+            pigServer.registerQuery("E = join C by group, D by group;");
+            Iterator<Tuple> iter = pigServer.openIterator("E");
+
+            while(iter.hasNext()) {
+                dbshj.add(iter.next());
+            }
+        }
+        Assert.assertTrue(dbfrj.size()>0 && dbshj.size()>0);
+        Assert.assertEquals(true, TestHelper.compareBags(dbfrj, dbshj));
+    }
+
+    @Test
+    public void testSkewedJoinOuter() throws IOException {
+        pigServer.registerQuery("A = LOAD '" + SKEW_INPUT_FILE5 + "' as 
(id,name);");
+        pigServer.registerQuery("B = LOAD '" + SKEW_INPUT_FILE5 + "' as 
(id,name);");
+        try {
+            DataBag dbfrj = BagFactory.getInstance().newDefaultBag();
+            {
+                pigServer.registerQuery("C = join A by id left, B by id using 
\"skewed\";");
+                Iterator<Tuple> iter = pigServer.openIterator("C");
+
+                while(iter.hasNext()) {
+                    dbfrj.add(iter.next());
+                }
+            }
+            {
+                pigServer.registerQuery("C = join A by id right, B by id using 
\"skewed\";");
+                Iterator<Tuple> iter = pigServer.openIterator("C");
+
+                while(iter.hasNext()) {
+                    dbfrj.add(iter.next());
+                }
+            }
+            {
+                pigServer.registerQuery("C = join A by id full, B by id using 
\"skewed\";");
+                Iterator<Tuple> iter = pigServer.openIterator("C");
+
+                while(iter.hasNext()) {
+                    dbfrj.add(iter.next());
+                }
+            }
+        } catch(Exception e) {
+            System.out.println(e.getMessage());
+            e.printStackTrace();
+            fail("Should support outer join in skewed join");
+        }
+        return;
+    }
+}
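
Each test above follows the same pattern: the join is run once with the
specialized algorithm requested via the USING clause ("replicated" or
"skewed") and once with the default join, and the two result bags must be
non-empty and equal; the outer-join variants are only required to complete
without throwing.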

Added: hadoop/pig/trunk/test/smoke-tests
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/test/smoke-tests?rev=953109&view=auto
==============================================================================
--- hadoop/pig/trunk/test/smoke-tests (added)
+++ hadoop/pig/trunk/test/smoke-tests Wed Jun  9 18:37:54 2010
@@ -0,0 +1,4 @@
+**/TestCommit.java
+**/TestMultiQueryLocal.java
+**/TestStreaming.java
+**/TestJoinSmoke.java

Added: hadoop/pig/trunk/test/unit-tests
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/test/unit-tests?rev=953109&view=auto
==============================================================================
--- hadoop/pig/trunk/test/unit-tests (added)
+++ hadoop/pig/trunk/test/unit-tests Wed Jun  9 18:37:54 2010
@@ -0,0 +1,83 @@
+**/TestAdd.java
+**/TestAlgebraicEvalLocal.java
+**/TestBagFormat.java
+**/TestBoolean.java
+**/TestCmdLineParser.java
+**/TestConstExpr.java
+**/TestConversions.java
+**/TestDataBag.java
+**/TestDataModel.java
+**/TestDeleteOnFail.java
+**/TestDivide.java
+**/TestEqualTo.java
+**/TestEvalPipelineLocal.java
+**/TestExperimentalColumnPrune.java
+**/TestExperimentalFilterAboveForeach.java
+**/TestExperimentalFilterRule.java
+**/TestExperimentalListener.java
+**/TestExperimentalLogicalOptimizer.java
+**/TestExperimentalLogToPhyTranslationVisitor.java
+**/TestExperimentalOperatorPlan.java
+**/TestExperimentalPruneMapKeys.java
+**/TestExperimentalRule.java
+**/TestFilter.java
+**/TestForEach.java
+**/TestForEachNestedPlanLocal.java
+**/TestFuncSpec.java
+**/TestGreaterThan.java
+**/TestGTOrEqual.java
+**/TestInstantiateFunc.java
+**/TestInvoker.java
+**/TestLessThan.java
+**/TestLoadFunc.java
+**/TestLocal2.java
+**/TestLocal.java
+**/TestLocalPOSplit.java
+**/TestLogicalOptimizer.java
+**/TestLogicalPlanMigrationVisitor.java
+**/TestLogToPhyCompiler.java
+**/TestLTOrEqual.java
+**/TestMod.java
+**/TestMultiply.java
+**/TestMultiQueryLocal.java
+**/TestNotEqualTo.java
+**/TestNull.java
+**/TestOperatorPlan.java
+**/TestPackage.java
+**/TestParamSubPreproc.java
+**/TestPartitionFilterOptimization.java
+**/TestPhyOp.java
+**/TestPigScriptParser.java
+**/TestPigSplit.java
+**/TestPigStats.java
+**/TestPinOptions.java
+**/TestPOBinCond.java
+**/TestPOCast.java
+**/TestPOCogroup.java
+**/TestPOCross.java
+**/TestPODistinct.java
+**/TestPOGenerate.java
+**/TestPOMapLookUp.java
+**/TestPOSort.java
+**/TestPOUserFunc.java
+**/TestProjectionMap.java
+**/TestProject.java
+**/TestPruneColumn.java
+**/TestPruneColumnNewLogicalPlan.java
+**/TestPushDownForeachFlatten.java
+**/TestPushUpFilter.java
+**/TestRegexp.java
+**/TestRelevantFields.java
+**/TestRequiredFields.java
+**/TestResourceSchema.java
+**/TestRewire.java
+**/TestSchemaParser.java
+**/TestSchemaUtil.java
+**/TestStreamingLocal.java
+**/TestSubtract.java
+**/TestTextDataParser.java
+**/TestTupleFormat.java
+**/TestTypeChecking.java
+**/TestTypeCheckingValidator.java
+**/TestTypeCheckingValidatorNoSchema.java
+**/TestUDFWithoutParameter.java

