Modified: 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOCogroup.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOCogroup.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOCogroup.java 
(original)
+++ 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOCogroup.java 
Tue Jan 27 02:27:45 2015
@@ -37,13 +37,13 @@ import org.apache.pig.newplan.logical.ex
 import 
org.apache.pig.newplan.logical.relational.LogicalSchema.LogicalFieldSchema;
 
 public class LOCogroup extends LogicalRelationalOperator {
-    
+
     // List of booleans specifying if any of the cogroups is inner
     private boolean[] mIsInner;
-    
+
     // List of expressionPlans according to input
     private MultiMap<Integer,LogicalExpressionPlan> mExpressionPlans;
-    
+
     /**
      * Enum for the type of group
      */
@@ -52,39 +52,92 @@ public class LOCogroup extends LogicalRe
         COLLECTED,  // Collected group
         MERGE       // Map-side CoGroup on sorted data
     };
-    
+
     private GROUPTYPE mGroupType;
-    
-    private LogicalFieldSchema groupKeyUidOnlySchema; 
-    
+
+    private LogicalFieldSchema groupKeyUidOnlySchema;
+
     /*
      * This is a map storing Uids which have been generated for an input
      * This map is required to make the uids persistant between calls of
      * resetSchema and getSchema
      */
     private Map<Integer,Long> generatedInputUids = new HashMap<Integer,Long>();
-    
+
     final static String GROUP_COL_NAME = "group";
-    
-    /** 
+
+    /**
      * static constant to refer to the option of selecting a group type
      */
     public final static Integer OPTION_GROUPTYPE = 1;
-    
+
+    //the pivot value
+    private int pivot = -1;
+    //the index of the first field involved in ROLLUP
+    private int rollupFieldIndex = 0;
+    //the original index of the first field involved in ROLLUP, in case it was 
moved to the end
+    //(if we have the combination of cube and rollup)
+    private int rollupOldFieldIndex = 0;
+    //the total number of fields involved in the CUBE clause
+    private int dimensionSize = 0;
+
+    //number of algebraic functions used after rollup
+    private int nAlgebraic = 0;
+
+    public void setPivot(int pvt) {
+        this.pivot = pvt;
+    }
+
+    public int getPivot() {
+        return this.pivot;
+    }
+
+    public void setDimensionSize(int ds) {
+        this.dimensionSize = ds;
+    }
+
+    public int getDimensionSize() {
+        return this.dimensionSize;
+    }
+
+    public void setNumberAlgebraic(int na) {
+        this.nAlgebraic = na;
+    }
+
+    public int getNumberAlgebraic() {
+        return this.nAlgebraic;
+    }
+
+    public void setRollupOldFieldIndex(int rofi) {
+        this.rollupOldFieldIndex = rofi;
+    }
+
+    public int getRollupOldFieldIndex() {
+        return this.rollupOldFieldIndex;
+    }
+
+    public void setRollupFieldIndex(int rfi) {
+        this.rollupFieldIndex = rfi;
+    }
+
+    public int getRollupFieldIndex() {
+        return this.rollupFieldIndex;
+    }
+
     /**
      * Constructor for use in defining rule patterns
      * @param plan
      */
     public LOCogroup(LogicalPlan plan) {
-        super("LOCogroup", plan);     
+        super("LOCogroup", plan);
     }
-        
-    public LOCogroup(OperatorPlan plan, 
MultiMap<Integer,LogicalExpressionPlan> 
+
+    public LOCogroup(OperatorPlan plan, MultiMap<Integer,LogicalExpressionPlan>
     expressionPlans, boolean[] isInner ) {
         this( plan, expressionPlans, GROUPTYPE.REGULAR, isInner );
     }
 
-    public LOCogroup(OperatorPlan plan, 
MultiMap<Integer,LogicalExpressionPlan> 
+    public LOCogroup(OperatorPlan plan, MultiMap<Integer,LogicalExpressionPlan>
     expressionPlans, GROUPTYPE groupType, boolean[] isInner) {
         super("LOCogroup", plan);
         this.mExpressionPlans = expressionPlans;
@@ -93,7 +146,7 @@ public class LOCogroup extends LogicalRe
         }
         this.mGroupType = groupType;
     }
-    
+
     /**
      * Given an expression plan this function returns a LogicalFieldSchema
      * that can be generated using this expression plan
@@ -120,7 +173,7 @@ public class LOCogroup extends LogicalRe
         if (inputs == null) {
             throw new FrontendException(this, "Cannot get predecessor for " + 
this, 2233);
         }
-        
+
         List<LogicalFieldSchema> fieldSchemaList = new 
ArrayList<LogicalFieldSchema>();
 
         // See if we have more than one expression plans, if so the
@@ -139,7 +192,7 @@ public class LOCogroup extends LogicalRe
             LogicalSchema keySchema = new LogicalSchema();
             // We sort here to maintain the correct order of inputs
             for( Integer key : mExpressionPlans.keySet()) {
-                Collection<LogicalExpressionPlan> plans = 
+                Collection<LogicalExpressionPlan> plans =
                     mExpressionPlans.get(key);
 
                 for( LogicalExpressionPlan plan : plans ) {
@@ -175,14 +228,14 @@ public class LOCogroup extends LogicalRe
                     break;
                 }
                 break;
-            }           
+            }
         }
         if(mExpressionPlans.size() > 1){
             //reset the uid, because the group column is associated with more
             // than one input
             groupKeySchema.resetUid();
         }
-        
+
         if (groupKeySchema==null) {
             throw new FrontendException(this, "Cannot get group key schema for 
" + this, 2234);
         }
@@ -194,8 +247,8 @@ public class LOCogroup extends LogicalRe
         int counter = 0;
         for (Operator op : inputs) {
             LogicalSchema inputSchema = 
((LogicalRelationalOperator)op).getSchema();
-           
-            // Check if we already have calculated Uid for this bag for given 
+
+            // Check if we already have calculated Uid for this bag for given
             // input operator
             long bagUid;
             if (generatedInputUids.get(counter)!=null)
@@ -204,15 +257,15 @@ public class LOCogroup extends LogicalRe
                 bagUid = LogicalExpression.getNextUid();
                 generatedInputUids.put( counter, bagUid );
             }
-            
+
             LogicalFieldSchema newTupleFieldSchema = new LogicalFieldSchema(
                     null, inputSchema, DataType.TUPLE, 
LogicalExpression.getNextUid());
-            
+
             LogicalSchema bagSchema = new LogicalSchema();
             bagSchema.addField(newTupleFieldSchema);
-            
+
             LogicalFieldSchema newBagFieldSchema = new LogicalFieldSchema(
-                    ((LogicalRelationalOperator)op).getAlias(), bagSchema, 
+                    ((LogicalRelationalOperator)op).getAlias(), bagSchema,
                     DataType.BAG, bagUid);
 
             fieldSchemaList.add( newBagFieldSchema );
@@ -222,7 +275,7 @@ public class LOCogroup extends LogicalRe
         schema = new LogicalSchema();
         for(LogicalFieldSchema fieldSchema: fieldSchemaList) {
             schema.addField(fieldSchema);
-        }         
+        }
 
         return schema;
     }
@@ -239,32 +292,32 @@ public class LOCogroup extends LogicalRe
     public boolean isEqual(Operator other) throws FrontendException {
         if (other != null && other instanceof LOCogroup) {
             LOCogroup oc = (LOCogroup)other;
-            if( mGroupType == oc.mGroupType && 
-                    mIsInner.length == oc.mIsInner.length 
+            if( mGroupType == oc.mGroupType &&
+                    mIsInner.length == oc.mIsInner.length
                     && mExpressionPlans.size() == oc.mExpressionPlans.size() ) 
{
                 for( int i = 0; i < mIsInner.length; i++ ) {
                     if( mIsInner[i] != oc.mIsInner[i] ) {
                         return false;
                     }
                 }
-                for( Integer key : mExpressionPlans.keySet() ) {               
     
+                for( Integer key : mExpressionPlans.keySet() ) {
                     if( ! oc.mExpressionPlans.containsKey(key) ) {
                         return false;
                     }
-                    Collection<LogicalExpressionPlan> exp1 = 
+                    Collection<LogicalExpressionPlan> exp1 =
                         mExpressionPlans.get(key);
-                    Collection<LogicalExpressionPlan> exp2 = 
+                    Collection<LogicalExpressionPlan> exp2 =
                         oc.mExpressionPlans.get(key);
 
-                    if(! ( exp1 instanceof ArrayList<?> 
+                    if(! ( exp1 instanceof ArrayList<?>
                     || exp2 instanceof ArrayList<?> ) ) {
                         throw new FrontendException( "Expected an ArrayList " +
                         "of Expression Plans", 2235 );
                     }
 
-                    ArrayList<LogicalExpressionPlan> expList1 = 
+                    ArrayList<LogicalExpressionPlan> expList1 =
                         (ArrayList<LogicalExpressionPlan>) exp1;
-                    ArrayList<LogicalExpressionPlan> expList2 = 
+                    ArrayList<LogicalExpressionPlan> expList2 =
                         (ArrayList<LogicalExpressionPlan>) exp2;
 
                     for (int i = 0; i < expList1.size(); i++) {
@@ -282,37 +335,37 @@ public class LOCogroup extends LogicalRe
     public GROUPTYPE getGroupType() {
         return mGroupType;
     }
-    
+
     public void resetGroupType() {
         mGroupType = GROUPTYPE.REGULAR;
     }
-    
+
     /**
-     * Returns an Unmodifiable Map of Input Number to Uid 
+     * Returns an Unmodifiable Map of Input Number to Uid
      * @return Unmodifiable Map<Integer,Long>
      */
     public Map<Integer,Long> getGeneratedInputUids() {
         return Collections.unmodifiableMap( generatedInputUids );
     }
-    
+
     public MultiMap<Integer,LogicalExpressionPlan> getExpressionPlans() {
         return mExpressionPlans;
     }
-    
+
     public void setExpressionPlans(MultiMap<Integer,LogicalExpressionPlan> 
plans) {
         this.mExpressionPlans = plans;
     }
-    
+
     public void setGroupType(GROUPTYPE gt) {
         mGroupType = gt;
     }
-    
+
     public void setInnerFlags(boolean[] flags) {
         if( flags != null ) {
             mIsInner = Arrays.copyOf( flags, flags.length );
         }
     }
-    
+
     public boolean[] getInner() {
         return mIsInner;
     }
@@ -322,7 +375,7 @@ public class LOCogroup extends LogicalRe
         groupKeyUidOnlySchema = null;
         generatedInputUids = new HashMap<Integer,Long>();
     }
-    
+
     public List<Operator> getInputs(LogicalPlan plan) {
       return plan.getPredecessors(this);
     }

Modified: 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOCube.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOCube.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOCube.java 
(original)
+++ 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOCube.java 
Tue Jan 27 02:27:45 2015
@@ -84,6 +84,16 @@ import org.apache.pig.newplan.logical.ex
 public class LOCube extends LogicalRelationalOperator {
     private MultiMap<Integer, LogicalExpressionPlan> mExpressionPlans;
     private List<String> operations;
+    //the pivot position
+    private int pivot = -1;
+
+    public void setPivot(int pvt) {
+        this.pivot = pvt;
+    }
+
+    public int getPivot() {
+        return this.pivot;
+    }
 
     public LOCube(LogicalPlan plan) {
        super("LOCube", plan);

Modified: 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogToPhyTranslationVisitor.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogToPhyTranslationVisitor.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogToPhyTranslationVisitor.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogToPhyTranslationVisitor.java
 Tue Jan 27 02:27:45 2015
@@ -58,6 +58,7 @@ import org.apache.pig.backend.hadoop.exe
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PONative;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PORank;
+import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PORollupHIIForEach;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSkewedJoin;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSort;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSplit;
@@ -900,6 +901,128 @@ public class LogToPhyTranslationVisitor
         translateSoftLinks(foreach);
     }
 
+    @Override
+    public void visit(LORollupHIIForEach hforeach) throws FrontendException {
+        String scope = DEFAULT_SCOPE;
+
+        List<PhysicalPlan> innerPlans = new ArrayList<PhysicalPlan>();
+
+        org.apache.pig.newplan.logical.relational.LogicalPlan inner = 
hforeach.getInnerPlan();
+        LOGenerate gen = (LOGenerate) inner.getSinks().get(0);
+
+        List<LogicalExpressionPlan> exps = gen.getOutputPlans();
+        List<Operator> preds = inner.getPredecessors(gen);
+
+        currentPlans.push(currentPlan);
+
+        // we need to translate each predecessor of LOGenerate into a physical 
plan.
+        // The physical plan should contain the expression plan for this 
predecessor plus
+        // the subtree starting with this predecessor
+        for (int i = 0; i < exps.size(); i++) {
+            currentPlan = new PhysicalPlan();
+            // translate the expression plan
+            PlanWalker childWalker = new 
ReverseDependencyOrderWalkerWOSeenChk(exps.get(i));
+            pushWalker(childWalker);
+            childWalker.walk(new ExpToPhyTranslationVisitor(exps.get(i), 
childWalker, gen,
+                    currentPlan, logToPhyMap));
+            popWalker();
+
+            List<Operator> leaves = exps.get(i).getSinks();
+            for (Operator l : leaves) {
+                PhysicalOperator op = logToPhyMap.get(l);
+                if (l instanceof ProjectExpression) {
+                    int input = ((ProjectExpression) l).getInputNum();
+
+                    // for each sink projection, get its input logical plan 
and translate it
+                    Operator pred = preds.get(input);
+                    childWalker = new SubtreeDependencyOrderWalker(inner, 
pred);
+                    pushWalker(childWalker);
+                    childWalker.walk(this);
+                    popWalker();
+
+                    // get the physical operator of the leaf of input logical 
plan
+                    PhysicalOperator leaf = logToPhyMap.get(pred);
+
+                    if (pred instanceof LOInnerLoad) {
+                        // if predecessor is only an LOInnerLoad, remove the 
project that
+                        // comes from LOInnerLoad and change the column of 
project that
+                        // comes from expression plan
+                        currentPlan.remove(leaf);
+                        logToPhyMap.remove(pred);
+
+                        POProject leafProj = (POProject) leaf;
+                        try {
+                            if (leafProj.isStar()) {
+                                ((POProject) op).setStar(true);
+                            } else if (leafProj.isProjectToEnd()) {
+                                ((POProject) 
op).setProjectToEnd(leafProj.getStartCol());
+                            } else {
+                                ((POProject) 
op).setColumn(leafProj.getColumn());
+                            }
+
+                        } catch (ExecException e) {
+                            throw new FrontendException(hforeach, "Cannot get 
column from " + leaf,
+                                    2230, e);
+                        }
+
+                    } else {
+                        currentPlan.connect(leaf, op);
+                    }
+                }
+            }
+            innerPlans.add(currentPlan);
+        }
+
+        currentPlan = currentPlans.pop();
+
+        // PhysicalOperator poGen = new POGenerate(new OperatorKey("",
+        // r.nextLong()), inputs, toBeFlattened);
+        boolean[] flatten = gen.getFlattenFlags();
+        List<Boolean> flattenList = new ArrayList<Boolean>();
+        for (boolean fl : flatten) {
+            flattenList.add(fl);
+        }
+        // Create new PORollupHIIForEach for translation from Logical Plan to 
Physical Plan
+        PORollupHIIForEach poHFE = new PORollupHIIForEach(new 
OperatorKey(scope, nodeGen.getNextNodeId(scope)),
+                hforeach.getRequestedParallelism(), innerPlans, flattenList);
+
+        // if the pivot position is zero, set the pivot position for physical 
op is zero
+        if(hforeach.getPivot() == 0)
+            poHFE.setPivot(0);
+        //else, decrease the pivot position by one, because the user-specified
+        //position and the rollup field index differ by one
+        else
+            poHFE.setPivot(hforeach.getPivot() - 1);
+        //get the start field index and size of rollup position in case the 
rollup does not stand at the front
+        poHFE.setRollupFieldIndex(hforeach.getRollupFieldIndex());
+        poHFE.setRollupOldFieldIndex(hforeach.getRollupOldFieldIndex());
+        poHFE.setRollupSize(hforeach.getRollupSize());
+        poHFE.setDimensionSize(hforeach.getDimensionSize());
+
+        poHFE.addOriginalLocation(hforeach.getAlias(), hforeach.getLocation());
+        poHFE.setResultType(DataType.BAG);
+        logToPhyMap.put(hforeach, poHFE);
+        currentPlan.add(poHFE);
+
+        // generate cannot have multiple inputs
+        List<Operator> op = hforeach.getPlan().getPredecessors(hforeach);
+
+        // generate may not have any predecessors
+        if (op == null)
+            return;
+
+        PhysicalOperator from = logToPhyMap.get(op.get(0));
+        try {
+            currentPlan.connect(from, poHFE);
+        } catch (Exception e) {
+            int errCode = 2015;
+            String msg = "Invalid physical operators in the physical plan";
+            throw new LogicalToPhysicalTranslatorException(msg, errCode, 
PigException.BUG, e);
+        }
+
+        translateSoftLinks(hforeach);
+    }
+
     /**
      * This function takes in a List of LogicalExpressionPlan and converts 
them to
      * a list of PhysicalPlans
@@ -1010,6 +1133,19 @@ public class LogToPhyTranslationVisitor
         case REGULAR:
             POPackage poPackage = compileToLR_GR_PackTrio(cg, 
cg.getCustomPartitioner(), cg.getInner(), cg.getExpressionPlans());
             poPackage.getPkgr().setPackageType(PackageType.GROUP);
+            if(cg.getPivot()!=-1) {
+                //Set the pivot value
+                poPackage.setPivot(cg.getPivot());
+                //Set the total number of fields involved in the CUBE clause
+                poPackage.setDimensionSize(cg.getDimensionSize());
+                //Set the index of the first field involved in ROLLUP
+                poPackage.setRollupFieldIndex(cg.getRollupFieldIndex());
+                //Set the original index of the first field involved in ROLLUP, 
in case it was moved to the end
+                //(if we have the combination of cube and rollup)
+                poPackage.setRollupOldFieldIndex(cg.getRollupOldFieldIndex());
+                //Set the number of algebraic functions used after rollup
+                poPackage.setNumberAlgebraic(cg.getNumberAlgebraic());
+            }
             logToPhyMap.put(cg, poPackage);
             break;
         case MERGE:

Modified: 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalPlan.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalPlan.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalPlan.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalPlan.java
 Tue Jan 27 02:27:45 2015
@@ -263,6 +263,7 @@ public class LogicalPlan extends BaseOpe
             disabledOptimizerRules.add("ColumnMapKeyPrune");
             disabledOptimizerRules.add("AddForEach");
             disabledOptimizerRules.add("GroupByConstParallelSetter");
+            disabledOptimizerRules.add("RollupHIIOptimizer");
         }
 
         try {

Modified: 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalRelationalNodesVisitor.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalRelationalNodesVisitor.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalRelationalNodesVisitor.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalRelationalNodesVisitor.java
 Tue Jan 27 02:27:45 2015
@@ -57,6 +57,9 @@ public abstract class LogicalRelationalN
     public void visit(LOForEach foreach) throws FrontendException {
     }
 
+    public void visit(LORollupHIIForEach horeach) throws FrontendException {
+    }
+
     public void visit(LOGenerate gen) throws FrontendException {
     }
 

Modified: 
pig/branches/spark/src/org/apache/pig/newplan/logical/rules/OptimizerUtils.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/newplan/logical/rules/OptimizerUtils.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/newplan/logical/rules/OptimizerUtils.java 
(original)
+++ 
pig/branches/spark/src/org/apache/pig/newplan/logical/rules/OptimizerUtils.java 
Tue Jan 27 02:27:45 2015
@@ -27,6 +27,7 @@ import org.apache.pig.newplan.logical.ex
 import org.apache.pig.newplan.logical.expression.UserFuncExpression;
 import org.apache.pig.newplan.logical.relational.LOForEach;
 import org.apache.pig.newplan.logical.relational.LOGenerate;
+import org.apache.pig.newplan.logical.relational.LORollupHIIForEach;
 import org.apache.pig.newplan.logical.relational.LogicalPlan;
 
 public class OptimizerUtils {
@@ -41,6 +42,16 @@ public class OptimizerUtils {
     }
 
     /**
+     * Find generate op from the rolluphiiforeach operator.
+     * @param hfe the LORollupHIIForEach instance
+     * @return LOGenerate instance
+     */
+    public static LOGenerate findGenerate(LORollupHIIForEach hfe) {
+        LogicalPlan inner = hfe.getInnerPlan();
+        return (LOGenerate) inner.getSinks().get(0);
+    }
+
+    /**
      * Check if a given LOGenerate operator has any flatten fields.
      * @param gen the given LOGenerate instance
      * @return true if LOGenerate instance contains flatten fields, false 
otherwise

Modified: pig/branches/spark/src/org/apache/pig/parser/AliasMasker.g
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/parser/AliasMasker.g?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/parser/AliasMasker.g (original)
+++ pig/branches/spark/src/org/apache/pig/parser/AliasMasker.g Tue Jan 27 
02:27:45 2015
@@ -247,6 +247,10 @@ cube_clause
     : ^( CUBE cube_item )
 ;
 
+pivot_clause
+    : ^( PIVOT INTEGER )
+;
+
 cube_item
     : rel ( cube_by_clause )
 ;
@@ -260,7 +264,7 @@ cube_or_rollup
 ;
 
 cube_rollup_list
-    : ^( ( CUBE | ROLLUP ) cube_by_expr_list )
+    : ^( CUBE cube_by_expr_list ) | ^( ROLLUP cube_by_expr_list pivot_clause? )
 ;
 
 cube_by_expr_list
@@ -642,6 +646,7 @@ eid : rel_str_op
     | FOREACH
     | CUBE
     | ROLLUP
+    | PIVOT
     | MATCHES
     | ORDER
     | RANK

Modified: pig/branches/spark/src/org/apache/pig/parser/AstPrinter.g
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/parser/AstPrinter.g?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/parser/AstPrinter.g (original)
+++ pig/branches/spark/src/org/apache/pig/parser/AstPrinter.g Tue Jan 27 
02:27:45 2015
@@ -106,6 +106,10 @@ parallel_clause
     : ^( PARALLEL INTEGER ) { sb.append(" ").append($PARALLEL.text).append(" 
").append($INTEGER.text); }
 ;
 
+pivot_clause
+    : ^( PIVOT INTEGER ) { sb.append(" ").append($PIVOT.text).append(" 
").append($INTEGER.text); }
+;
+
 alias
     : IDENTIFIER { sb.append($IDENTIFIER.text); }
 ;
@@ -262,7 +266,7 @@ cube_or_rollup
 ;
 
 cube_rollup_list
-    : ^( ( CUBE { sb.append($CUBE.text).append("("); } | ROLLUP { 
sb.append($ROLLUP.text).append("("); } ) cube_by_expr_list { sb.append(")"); })
+    : ^( CUBE { sb.append($CUBE.text).append("("); } cube_by_expr_list { 
sb.append(")"); } ) | ^( ROLLUP { sb.append($ROLLUP.text).append("("); } 
cube_by_expr_list { sb.append(")"); } )
 ;
 
 cube_by_expr_list
@@ -270,7 +274,7 @@ cube_by_expr_list
 ;
 
 cube_by_expr
-    : col_range | expr | STAR { sb.append($STAR.text); }
+    : col_range | expr | STAR { sb.append($STAR.text); } { sb.append(" "); }
 ;
 
 group_clause
@@ -672,6 +676,7 @@ eid : rel_str_op
     | FOREACH   { sb.append($FOREACH.text); }
     | CUBE      { sb.append($CUBE.text); }
     | ROLLUP    { sb.append($ROLLUP.text); }
+    | PIVOT     { sb.append($PIVOT.text); }
     | MATCHES   { sb.append($MATCHES.text); }
     | ORDER     { sb.append($ORDER.text); }
     | RANK      { sb.append($RANK.text); }

Modified: pig/branches/spark/src/org/apache/pig/parser/AstValidator.g
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/parser/AstValidator.g?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/parser/AstValidator.g (original)
+++ pig/branches/spark/src/org/apache/pig/parser/AstValidator.g Tue Jan 27 
02:27:45 2015
@@ -296,6 +296,10 @@ cube_clause
  : ^( CUBE cube_item )
 ;
 
+pivot_clause
+    : ^( PIVOT INTEGER )
+;
+
 cube_item
  : rel ( cube_by_clause )
 ;
@@ -309,7 +313,7 @@ cube_or_rollup
 ;
 
 cube_rollup_list
- : ^( ( CUBE | ROLLUP ) cube_by_expr_list )
+ : ^( CUBE cube_by_expr_list ) | ^( ROLLUP cube_by_expr_list pivot_clause? )
 ;
 
 cube_by_expr_list
@@ -663,6 +667,7 @@ eid : rel_str_op
     | FOREACH
     | CUBE
     | ROLLUP
+    | PIVOT
     | MATCHES
     | ORDER
     | RANK

Modified: pig/branches/spark/src/org/apache/pig/parser/DryRunGruntParser.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/parser/DryRunGruntParser.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/parser/DryRunGruntParser.java 
(original)
+++ pig/branches/spark/src/org/apache/pig/parser/DryRunGruntParser.java Tue Jan 
27 02:27:45 2015
@@ -379,4 +379,11 @@ public class DryRunGruntParser extends P
        protected void printClear() {
        }
 
+    @Override
+    protected void processDefault(String key, String value) throws IOException 
{
+    }
+
+    @Override
+    protected void processDeclare(String key, String value) throws IOException 
{
+    }
 }

Modified: pig/branches/spark/src/org/apache/pig/parser/LogicalPlanBuilder.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/parser/LogicalPlanBuilder.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/parser/LogicalPlanBuilder.java 
(original)
+++ pig/branches/spark/src/org/apache/pig/parser/LogicalPlanBuilder.java Tue 
Jan 27 02:27:45 2015
@@ -451,10 +451,30 @@ public class LogicalPlanBuilder {
         return new LOCube(plan);
     }
 
+    void setPivotRollupCubeOp(LOCube op, Integer pivot) throws 
ParserValidationException {
+        if(pivot!=null)
+            op.setPivot(pivot);
+    }
+
     String buildCubeOp(SourceLocation loc, LOCube op, String alias, String 
inputAlias,
         List<String> operations, MultiMap<Integer, LogicalExpressionPlan> 
expressionPlans)
         throws ParserValidationException {
 
+        // check whether the pivot value is valid; if no pivot position
+        // is specified, the middle position is chosen as the pivot
+        try {
+            if(op.getPivot()!=-1) {
+                if (op.getPivot() < 0 || op.getPivot() >= 
expressionPlans.get(0).size()) {
+                    FrontendException fe = new FrontendException("PIVOT is out 
of bound");
+                    throw fe;
+                }
+            }
+            else
+                
op.setPivot((int)(Math.round(expressionPlans.get(0).size()/2.0)));
+        } catch (FrontendException e) {
+            throw new ParserValidationException(intStream, loc, e);
+        }
+
         // check if continuously occurring cube operations be combined
         combineCubeOperations((ArrayList<String>) operations, expressionPlans);
 
@@ -713,6 +733,7 @@ public class LogicalPlanBuilder {
 
         // build group by operator
         try {
+            groupby.setPivot(op.getPivot());
             return buildGroupOp(loc, (LOCogroup) groupby, op.getAlias(), 
inpAliases, exprPlansCopy,
                 GROUPTYPE.REGULAR, innerFlags, null);
         } catch (ParserValidationException pve) {

Modified: pig/branches/spark/src/org/apache/pig/parser/LogicalPlanGenerator.g
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/parser/LogicalPlanGenerator.g?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/parser/LogicalPlanGenerator.g 
(original)
+++ pig/branches/spark/src/org/apache/pig/parser/LogicalPlanGenerator.g Tue Jan 
27 02:27:45 2015
@@ -493,12 +493,20 @@ func_args returns[List<String> args]
 // It also outputs the order of operations i.e in this case CUBE operation 
followed by ROLLUP operation
 // These inputs are passed to buildCubeOp methods which then builds the 
logical plan for CUBE operator.
 // If user specifies STAR or RANGE expression for dimensions then it will be 
expanded inside buildCubeOp.
+pivot_clause returns[int pivot]
+ : ^( PIVOT INTEGER )
+   {
+       $pivot = Integer.parseInt( $INTEGER.text );
+   }
+;
+
 cube_clause returns[String alias]
 scope {
     LOCube cubeOp;
     MultiMap<Integer, LogicalExpressionPlan> cubePlans;
     List<String> operations;
     int inputIndex;
+    int pivot;
 }
 scope GScope;
 @init {
@@ -548,7 +556,7 @@ cube_rollup_list returns[String operatio
 @init {
     $plans = new ArrayList<LogicalExpressionPlan>();
 }
- : ^( ( CUBE { $operation = "CUBE"; } | ROLLUP { $operation = "ROLLUP"; } ) 
cube_by_expr_list { $plans = $cube_by_expr_list.plans; } )
+ : ^( CUBE { $operation = "CUBE"; } cube_by_expr_list { $plans = 
$cube_by_expr_list.plans; } ) | ^( ROLLUP { $operation = "ROLLUP"; } 
cube_by_expr_list { $plans = $cube_by_expr_list.plans; } pivot_clause? { if 
($pivot_clause.tree!=null) builder.setPivotRollupCubeOp($cube_clause::cubeOp, 
$pivot_clause.pivot); } )
 ;
 
 cube_by_expr_list returns[List<LogicalExpressionPlan> plans]
@@ -1941,6 +1949,7 @@ eid returns[String id] : rel_str_op { $i
     | COGROUP { $id = $COGROUP.text; }
     | CUBE { $id = $CUBE.text; }
     | ROLLUP { $id = $ROLLUP.text; }
+    | PIVOT { $id = $PIVOT.text; }
     | JOIN { $id = $JOIN.text; }
     | CROSS { $id = $CROSS.text; }
     | UNION { $id = $UNION.text; }

Modified: pig/branches/spark/src/org/apache/pig/parser/QueryLexer.g
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/parser/QueryLexer.g?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/parser/QueryLexer.g (original)
+++ pig/branches/spark/src/org/apache/pig/parser/QueryLexer.g Tue Jan 27 
02:27:45 2015
@@ -153,6 +153,9 @@ ONSCHEMA : 'ONSCHEMA'
 PARALLEL : 'PARALLEL'
 ;
 
+PIVOT : 'PIVOT'
+;
+
 PARTITION : 'PARTITION'
 ;
 

Modified: pig/branches/spark/src/org/apache/pig/parser/QueryParser.g
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/parser/QueryParser.g?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/parser/QueryParser.g (original)
+++ pig/branches/spark/src/org/apache/pig/parser/QueryParser.g Tue Jan 27 
02:27:45 2015
@@ -595,7 +595,10 @@ union_clause : UNION^ ONSCHEMA? rel_list
 cube_clause : CUBE rel BY cube_rollup_list ( COMMA cube_rollup_list )* -> ^( 
CUBE rel ^( BY cube_rollup_list+ ) )
 ;
 
-cube_rollup_list : ( CUBE | ROLLUP )^ LEFT_PAREN! real_arg ( COMMA! real_arg 
)* RIGHT_PAREN!
+cube_rollup_list : ( CUBE^ LEFT_PAREN! real_arg ( COMMA! real_arg )* 
RIGHT_PAREN! ) | ( ROLLUP^ LEFT_PAREN! real_arg ( COMMA! real_arg )* 
RIGHT_PAREN! pivot_clause? )
+;
+
+pivot_clause : PIVOT^ INTEGER
 ;
 
 flatten_clause : FLATTEN^ LEFT_PAREN! expr RIGHT_PAREN!

Modified: pig/branches/spark/src/org/apache/pig/tools/grunt/GruntParser.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/tools/grunt/GruntParser.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/tools/grunt/GruntParser.java 
(original)
+++ pig/branches/spark/src/org/apache/pig/tools/grunt/GruntParser.java Tue Jan 
27 02:27:45 2015
@@ -451,12 +451,16 @@ public class GruntParser extends PigScri
     @Override
     protected void processRegister(String jar) throws IOException {
         filter.validate(PigCommandFilter.Command.REGISTER);
+        jar = parameterSubstitutionInGrunt(jar);
         mPigServer.registerJar(jar);
     }
 
     @Override
     protected void processRegister(String path, String scriptingLang, String 
namespace) throws IOException, ParseException {
         filter.validate(PigCommandFilter.Command.REGISTER);
+        path = parameterSubstitutionInGrunt(path);
+        scriptingLang = parameterSubstitutionInGrunt(scriptingLang);
+        namespace = parameterSubstitutionInGrunt(namespace);
         if(path.endsWith(".jar")) {
             if(scriptingLang != null || namespace != null) {
                 throw new ParseException("Cannot register a jar with a 
scripting language or namespace");
@@ -568,6 +572,8 @@ public class GruntParser extends PigScri
     @Override
     protected void processSet(String key, String value) throws IOException, 
ParseException {
         filter.validate(PigCommandFilter.Command.SET);
+        key = parameterSubstitutionInGrunt(key);
+        value = parameterSubstitutionInGrunt(value);
         if (key.equals("debug"))
         {
             if (value.equals("on"))
@@ -636,6 +642,7 @@ public class GruntParser extends PigScri
     @Override
     protected void processCat(String path) throws IOException {
         filter.validate(PigCommandFilter.Command.CAT);
+        path = parameterSubstitutionInGrunt(path);
         if(mExplain == null) { // process only if not in "explain" mode
 
             executeBatch();
@@ -685,6 +692,7 @@ public class GruntParser extends PigScri
     @Override
     protected void processCD(String path) throws IOException {
         filter.validate(PigCommandFilter.Command.CD);
+        path = parameterSubstitutionInGrunt(path);
         ContainerDescriptor container;
         if(mExplain == null) { // process only if not in "explain" mode
 
@@ -820,6 +828,7 @@ public class GruntParser extends PigScri
     @Override
     protected void processLS(String path) throws IOException {
         filter.validate(PigCommandFilter.Command.LS);
+        path = parameterSubstitutionInGrunt(path);
 
         if (mExplain == null) { // process only if not in "explain" mode
 
@@ -945,6 +954,8 @@ public class GruntParser extends PigScri
     protected void processMove(String src, String dst) throws IOException
     {
         filter.validate(PigCommandFilter.Command.MV);
+        src = parameterSubstitutionInGrunt(src);
+        dst = parameterSubstitutionInGrunt(dst);
         if(mExplain == null) { // process only if not in "explain" mode
 
             executeBatch();
@@ -971,6 +982,8 @@ public class GruntParser extends PigScri
     protected void processCopy(String src, String dst) throws IOException
     {
         filter.validate(PigCommandFilter.Command.CP);
+        src = parameterSubstitutionInGrunt(src);
+        dst = parameterSubstitutionInGrunt(dst);
         if(mExplain == null) { // process only if not in "explain" mode
 
             executeBatch();
@@ -993,6 +1006,8 @@ public class GruntParser extends PigScri
     protected void processCopyToLocal(String src, String dst) throws 
IOException
     {
         filter.validate(PigCommandFilter.Command.COPYTOLOCAL);
+        src = parameterSubstitutionInGrunt(src);
+        dst = parameterSubstitutionInGrunt(dst);
         if(mExplain == null) { // process only if not in "explain" mode
 
             executeBatch();
@@ -1015,6 +1030,8 @@ public class GruntParser extends PigScri
     protected void processCopyFromLocal(String src, String dst) throws 
IOException
     {
         filter.validate(PigCommandFilter.Command.COPYFROMLOCAL);
+        src = parameterSubstitutionInGrunt(src);
+        dst = parameterSubstitutionInGrunt(dst);
         if(mExplain == null) { // process only if not in "explain" mode
 
             executeBatch();
@@ -1037,6 +1054,7 @@ public class GruntParser extends PigScri
     protected void processMkdir(String dir) throws IOException
     {
         filter.validate(PigCommandFilter.Command.MKDIR);
+        dir = parameterSubstitutionInGrunt(dir);
         if(mExplain == null) { // process only if not in "explain" mode
 
             executeBatch();
@@ -1052,6 +1070,7 @@ public class GruntParser extends PigScri
     protected void processPig(String cmd) throws IOException
     {
         int start = 1;
+        cmd = parameterSubstitutionInGrunt(cmd);
         if (!mInteractive) {
             start = getLineNumber();
         }
@@ -1068,6 +1087,7 @@ public class GruntParser extends PigScri
     protected void processRemove(String path, String options) throws 
IOException {
         filter.validate(PigCommandFilter.Command.RM);
         filter.validate(PigCommandFilter.Command.RMF);
+        path = parameterSubstitutionInGrunt(path);
         int MAX_MS_TO_WAIT_FOR_FILE_DELETION = 10 * 60 * 1000;
         int MS_TO_SLEEP_WHILE_WAITING_FOR_FILE_DELETION = 250;
 
@@ -1112,6 +1132,9 @@ public class GruntParser extends PigScri
     @Override
     protected void processFsCommand(String[] cmdTokens) throws IOException {
         filter.validate(PigCommandFilter.Command.FS);
+        for (int i = 0 ; i < cmdTokens.length ; i++) {
+            cmdTokens[i] = parameterSubstitutionInGrunt(cmdTokens[i]);
+        }
         if(mExplain == null) { // process only if not in "explain" mode
 
             executeBatch();
@@ -1138,6 +1161,9 @@ public class GruntParser extends PigScri
     @Override
     protected void processShCommand(String[] cmdTokens) throws IOException {
         filter.validate(PigCommandFilter.Command.SH);
+        for (int i = 0 ; i < cmdTokens.length ; i++) {
+            cmdTokens[i] = parameterSubstitutionInGrunt(cmdTokens[i]);
+        }
         if(mExplain == null) { // process only if not in "explain" mode
             try {
                 executeBatch();
@@ -1241,6 +1267,7 @@ public class GruntParser extends PigScri
 
     @Override
     protected void processSQLCommand(String cmd) throws IOException{
+        cmd = parameterSubstitutionInGrunt(cmd);
         if(mExplain == null) { // process only if not in "explain" mode
             if 
(!mPigServer.getPigContext().getProperties().get("pig.sql.type").equals("hcat"))
 {
                 throw new IOException("sql command only support hcat 
currently");
@@ -1259,6 +1286,22 @@ public class GruntParser extends PigScri
         }
     }
 
+    @Override
+    protected void processDefault(String key, String value) throws IOException 
{
+        parameterSubstitutionInGrunt("%default " + key + " " + value);
+    }
+
+    @Override
+    protected void processDeclare(String key, String value) throws IOException 
{
+        parameterSubstitutionInGrunt("%declare " + key + " " + value);
+    }
+
+    private String parameterSubstitutionInGrunt(String input) throws 
IOException {
+        if (mInteractive && input != null) {
+            return mPigServer.getPigContext().doParamSubstitution(new 
BufferedReader(new StringReader(input))).trim();
+        }
+        return input;
+    }
     /**
      * StreamPrinter.
      *

Modified: 
pig/branches/spark/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj 
(original)
+++ 
pig/branches/spark/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj 
Tue Jan 27 02:27:45 2015
@@ -120,6 +120,10 @@ public abstract class PigScriptParser
 
        abstract protected void processScript(String script, boolean batch, 
List<String> params, List<String> files) throws IOException, ParseException;
 
+    abstract protected void processDefault(String key, String value) throws 
IOException;
+
+    abstract protected void processDeclare(String key, String value) throws 
IOException;
+
        static String unquote(String s)
        {
                if (s.charAt(0) == '\'' && s.charAt(s.length()-1) == '\'')
@@ -186,6 +190,8 @@ TOKEN: {<XML: "-xml">}
 TOKEN: {<OUT: "-out">}
 TOKEN: {<BRIEF: "-brief">}
 TOKEN: {<N: "-n">}
+TOKEN: {<PIGDEFAULT: "%default" >}
+TOKEN: {<DECLARE: "%declare" >} 
 
 // internal use commands
 TOKEN: {<SCRIPT_DONE: "scriptDone">}
@@ -420,6 +426,7 @@ TOKEN : { <QUOTEDSTRING :  "'"
           )
       )*
       "'"> }
+TOKEN: {<SHELLCMD: "`" (~["`"])* "`" >}
 void parse() throws IOException:
 {
        Token t1, t2, t3;
@@ -598,6 +605,16 @@ void parse() throws IOException:
                {processSet();}
        )
        |
+    <PIGDEFAULT>
+       t1 = GetKey()
+       t2 = GetDefaultValue()
+       {processDefault(t1.image, t2.image);}
+       |
+       <DECLARE>
+       t1 = GetKey()
+       t2 = GetDefaultValue()
+       {processDeclare(t1.image, t2.image);}
+       |
        <EOF>
        {quit();}
        |
@@ -796,6 +813,22 @@ Token GetValue() :
        )
 
        {return t;}
+}
+
+Token GetDefaultValue() :
+{
+       Token t;
+}
+{
+       (
+       t = GetPath()
+       |
+       t = <QUOTEDSTRING>
+       |
+       t = <SHELLCMD>
+       )
+
+       {return t;}
 }
 
 Token GetReserved () :

Modified: 
pig/branches/spark/src/org/apache/pig/tools/pigstats/tez/TezScriptState.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/tools/pigstats/tez/TezScriptState.java?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/tools/pigstats/tez/TezScriptState.java 
(original)
+++ 
pig/branches/spark/src/org/apache/pig/tools/pigstats/tez/TezScriptState.java 
Tue Jan 27 02:27:45 2015
@@ -203,7 +203,7 @@ public class TezScriptState extends Scri
         return dagScriptInfo.get(dagName);
     }
 
-    static class TezDAGScriptInfo {
+    public static class TezDAGScriptInfo {
 
         private static final Log LOG = 
LogFactory.getLog(TezDAGScriptInfo.class);
         private TezOperPlan tezPlan;

Modified: pig/branches/spark/test/e2e/harness/build.xml
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/harness/build.xml?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/harness/build.xml (original)
+++ pig/branches/spark/test/e2e/harness/build.xml Tue Jan 27 02:27:45 2015
@@ -41,6 +41,7 @@
             <include name="Log.pm"/>
             <include name="Properties.pm"/>
             <include name="test_harness.pl"/>
+            <include name="xmlReport.pl"/>
         </fileset>
     </copy>
 

Modified: pig/branches/spark/test/e2e/harness/xmlReport.pl
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/harness/xmlReport.pl?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/harness/xmlReport.pl (original)
+++ pig/branches/spark/test/e2e/harness/xmlReport.pl Tue Jan 27 02:27:45 2015
@@ -15,7 +15,7 @@ unshift( @INC, ".");
 ########################################################################
 # Sub: testcase
 # Prints the result for a test case.
-# 
+#
 # Parameters:
 # $class - the test file
 # $test  - the test name
@@ -24,115 +24,122 @@ unshift( @INC, ".");
 # Returns:
 # None
 #
-sub getTestcase($$$$$$){
-
-   my ( $class_name, $group_name, $test_name, $time, $status, $msg )=@_;
-   my $result = "";
-
-   if ( $status =~ "passed" ){
-      $result = "\n<testcase classname=\"$class_name\" name=\"$test_name\" 
time=\"$time\"/>";
+sub getTestcase($$$$$){
 
-   } elsif ( $status =~ "skipped" ){
-      $result = "\n<testcase classname=\"$class_name\" name=\"$test_name\" 
time=\"0\">"
-              . "\n<skipped type=\"$status\">$msg</skipped>"
-              . "\n</testcase>"
-              ;
-   } else {
-      $result = "\n<testcase classname=\"$class_name\" name=\"$test_name\" 
time=\"$time\">" 
-              . "\n<error type=\"$status\">$msg</error>"
-              . "\n</testcase>"
-              ;
-   } return $result; 
+    my ( $exec_type, $test_name, $time, $status, $msg )=@_;
+    my $result = "";
+    my $group_name = substr ($test_name, 0, rindex ($test_name, "_")); 
+
+    if ( $status =~ "passed" ){
+        $result = "\n<testcase classname=\"$exec_type.$group_name\" 
name=\"$test_name\" time=\"$time\"/>";
+    } elsif ( $status =~ "skipped" ){
+        $result = "\n<testcase classname=\"$exec_type.$group_name\" 
name=\"$test_name\" time=\"0\">"
+                  . "\n<skipped type=\"$status\">$msg</skipped>"
+                  . "\n</testcase>"
+                  ;
+    } else {
+        $result = "\n<testcase classname=\"$exec_type.$group_name\" 
name=\"$test_name\" time=\"$time\">"
+                  . "\n<error type=\"$status\">$msg</error>"
+                  . "\n</testcase>"
+                  ;
+    }
+    return $result;
 }
 
 
-sub printXmlReport($) {
+sub printXmlReport($$) {
 
-   my $host = Sys::Hostname::hostname();
-   my $tmpFileName =  shift;
-   my $reportFileName =  shift;
-
-   my $testcases = "";
-   my $total_time= 0;
-   my $testnameSuffix = "";
-
-   if ($tmpFileName =~ m/-local/) {
-      $testnameSuffix = "_local";
-   }
-
-   my $passedCount  = 0;
-   my $failureCount = 0;
-   my $errorCount   = 0;
-   my $skippedCount = 0;
-   my $totalCount = 0;
-
-   my %test2starttime = { '', 0 };
-
-   open( IN, "$tmpFileName")
-      || die "Could not open $tmpFileName\n";
-
-   while(<IN>){
-
-      my $line = $_;
-      # e.g.: "Beginning test Checkin_1 at 1346855793"
-      if ( $line =~ m/Beginning test (.*) at (.*)/ ) {
-       # put "test - start time" pair to the hash:     
-        $test2starttime{ $1 } = $2; 
-      } else { 
-       if ( $line =~ "TestDriver::run" ) {
-        my $duration = 0;
-         if ( $line =~ m/TestDriver::run.*Failed to run test (.*) <(.*)/ ) {
-            # ERROR TestDriver::run at : 470 Failed to run test 
ClassResolution_1 <Failed running 
./out/pigtest/hadoopqa/hadoopqa.1327755958/ClassResolution_1_benchmark.pig
-            #print "test aborted: $line";
-            $testcases .= getTestcase ( $1, "group", $1 . $testnameSuffix, 0, 
"aborted", $2 );   
-            $errorCount++;
-         } elsif ( $line =~ m/TestDriver::run.*Test (.*) SUCCEEDED at (.*)/ ) {
-            # INFO: TestDriver::run() at 444:Test Unicode_cmdline_1 SUCCEEDED 
at 1327751873 
-            #print "test passed: $1 $2 line: $line";
-            $passedCount++;
-            $duration = $2 - $test2starttime{ $1 }; 
-            $testcases .= getTestcase ( $1, "group", $1 . $testnameSuffix, 
$duration, "passed", "" );     
-         } elsif ( $line =~ m/TestDriver::run.*Test (.*) FAILED at (.*)/ ) {
-            $failureCount++;
-            $duration = $2 - $test2starttime{ $1 };
-            $testcases .= getTestcase ( $1, "group", $1 . $testnameSuffix, 
$duration, "failed", "" );
-            #print "test failed: $1 $2 line: $line";
-         } elsif ($line =~ "Running TEST GROUP") {
-            next;
-         } elsif ($line =~ m/TestDriver::run.*Test (.*) SKIPPED at (.*)/) {
-            # INFO: TestDriver::run() at 444:Test StreamingLocalErrors_1 
SKIPPED at 1327923441
-            $skippedCount++;
-            $duration = $2 - $test2starttime{ $1 };
-            $testcases .= getTestcase ( $1, "group", $1 . $testnameSuffix, 
$duration, "skipped", "" );
-         } else {
-            print STDERR "Ignored line: $line";
-            next;
-         }    
-         #$total_time= $total_time + $time;
-         $totalCount++;
-      }
-     }
-
-   }
-   close(IN);
-   #Report
-   my $host = Sys::Hostname::hostname();
-   my $run_name = "e2e tests";
-   my $report=
-       '<?xml version="1.0" encoding="UTF-8" ?>'
-     . "\n<testsuite errors=\"$errorCount\" failures=\"$failureCount\" 
skips=\"$skippedCount\"  hostname=\"$host\" name=\"$run_name\" 
tests=\"$totalCount\" time=\"$total_time\">" 
-     . "\n$testcases" 
-     . "\n</testsuite>" 
-     . "\n";
-   print $report;
+    my $host = Sys::Hostname::hostname();
+    my $reportFileName =  shift;
+    my $execType = shift;
+
+    my $testcases = "";
+    my $testRunStartTime = 0;
+    my $testRunEndTime = 0;
+
+    my $passedCount  = 0;
+    my $failureCount = 0;
+    my $errorCount   = 0;
+    my $skippedCount = 0;
+    my $totalCount = 0;
+
+    my %test2starttime = { '', 0 };
+
+    open( IN, "$reportFileName")
+        || die "Could not open $reportFileName\n";
+
+    while(<IN>) {
+
+        my $line = $_;
+        if ($line =~ m/Beginning test run at (.*)/ ) {
+            # e.g: Beginning test run at 1416422503
+            $testRunStartTime = $1;
+        } elsif ($line =~ m/Finished test run at (.*)/ ) {
+            # e.g: Finished test run at 1416428021
+            $testRunEndTime = $1;
+        } elsif ( $line =~ m/Beginning test (.*) at (.*)/ ) {
+            # e.g.: "Beginning test Checkin_1 at 1346855793"
+           # put "test - start time" pair to the hash: 
+            $test2starttime{ $1 } = $2;
+        } elsif ( $line =~ "TestDriver::run" ) {
+               my $duration = 0;
+            if ( $line =~ m/TestDriver::run.*Failed to run test (.*) <(.*)/ ) {
+                # ERROR TestDriver::run at : 470 Failed to run test 
ClassResolution_1 <Failed running 
./out/pigtest/hadoopqa/hadoopqa.1327755958/ClassResolution_1_benchmark.pig
+                #print "test aborted: $line";
+                $testcases .= getTestcase ( $execType, $1 , 0, "aborted", $2 );
+                $errorCount++;
+            } elsif ( $line =~ m/TestDriver::run.*Test (.*) SUCCEEDED at (.*)/ 
) {
+                # INFO: TestDriver::run() at 444:Test Unicode_cmdline_1 
SUCCEEDED at 1327751873
+                #print "test passed: $1 $2 line: $line";
+                $passedCount++;
+                $duration = $2 - $test2starttime{ $1 };
+                $testcases .= getTestcase ( $execType, $1 , $duration, 
"passed", "" );
+            } elsif ( $line =~ m/TestDriver::run.*Test (.*) FAILED at (.*)/ ) {
+                $failureCount++;
+                $duration = $2 - $test2starttime{ $1 };
+                $testcases .= getTestcase ( $execType, $1 , $duration, 
"failed", "" );
+                #print "test failed: $1 $2 line: $line";
+            } elsif ($line =~ "Running TEST GROUP") {
+                next;
+            } elsif ($line =~ m/TestDriver::run.*Test (.*) SKIPPED at (.*)/) {
+                # INFO: TestDriver::run() at 444:Test StreamingLocalErrors_1 
SKIPPED at 1327923441
+                $skippedCount++;
+                $duration = $2 - $test2starttime{ $1 };
+                $testcases .= getTestcase ( $execType, $1 , $duration, 
"skipped", "" );
+            } else {
+                print STDERR "Ignored line: $line";
+                next;
+            }
+            $totalCount++;
+        }
+
+    }
+    close(IN);
+    #Report
+    my $host = Sys::Hostname::hostname();
+    my $run_name = $execType . " e2e tests";
+    my $total_time = $testRunEndTime - $testRunStartTime;
+    my $report= '<?xml version="1.0" encoding="UTF-8" ?>'
+                . "\n<testsuite errors=\"$errorCount\" 
failures=\"$failureCount\" skips=\"$skippedCount\"  hostname=\"$host\" 
name=\"$run_name\" tests=\"$totalCount\" time=\"$total_time\">"
+                . "\n$testcases"
+                . "\n</testsuite>"
+                . "\n";
+    print $report;
 }
 
 
 if (!defined( $ARGV[0] )) {
-   die "No input log file specified\n";
+    die "No input log file specified\n";
+}
+
+my $execType = $ARGV[1];
+if (!defined( $ARGV[1] )) {
+    print STDERR "No execution type specified. Using mapred as default\n";
+    $execType = "mapred";
 }
 
-printXmlReport($ARGV[0]);
+# Harness log file name, exec type which will be used as test group name
+printXmlReport($ARGV[0], $execType);
 
 
 

Modified: pig/branches/spark/test/e2e/pig/build.xml
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/pig/build.xml?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/pig/build.xml (original)
+++ pig/branches/spark/test/e2e/pig/build.xml Tue Jan 27 02:27:45 2015
@@ -305,6 +305,7 @@
       <arg value="${test.location}/tests/macro.conf"/>
       <arg value="${test.location}/tests/orc.conf"/>
       <arg value="${test.location}/tests/hcat.conf"/>
+      <arg value="${test.location}/tests/utf8.conf"/>
     </exec>
   </target>
 

Modified: pig/branches/spark/test/e2e/pig/deployers/ExistingClusterDeployer.pm
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/pig/deployers/ExistingClusterDeployer.pm?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/pig/deployers/ExistingClusterDeployer.pm 
(original)
+++ pig/branches/spark/test/e2e/pig/deployers/ExistingClusterDeployer.pm Tue 
Jan 27 02:27:45 2015
@@ -240,6 +240,16 @@ sub generateData
             'filetype' => "ranking",
             'rows' => 30,
             'hdfs' => "singlefile/prerank",
+        }, {
+            'name' => "utf8Voter",
+            'filetype' => "utf8Voter",
+            'rows' => 30,
+            'hdfs' => "utf8Data/选民/utf8Voter",
+        }, {
+            'name' => "utf8Student",
+            'filetype' => "utf8Student",
+            'rows' => 300,
+            'hdfs' => "utf8Data/学生/utf8Student",
         }
     );
 

Modified: pig/branches/spark/test/e2e/pig/drivers/TestDriverPig.pm
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/pig/drivers/TestDriverPig.pm?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/pig/drivers/TestDriverPig.pm (original)
+++ pig/branches/spark/test/e2e/pig/drivers/TestDriverPig.pm Tue Jan 27 
02:27:45 2015
@@ -277,12 +277,7 @@ sub runPigCmdLine
 
     # Build the command
     my @baseCmd = $self->getPigCmd($testCmd, $log);
-    if ($testCmd->{'exectype'} eq "tez") {
-        push(@baseCmd, ("-x", "tez"));
-    }
-    if ($testCmd->{'exectype'} eq "spark") {
-        push(@baseCmd, ("-x", "spark"));
-    }
+    push(@baseCmd, ("-x", $testCmd->{'exectype'}));
     my @cmd = @baseCmd;
 
     # Add option -l giving location for secondary logs
@@ -440,9 +435,9 @@ sub getPigCmd($$$)
            $additionalJavaParams .= " -Dmapred.local.dir=$hadoopTmpDir 
-Dmapreduce.cluster.local.dir=$hadoopTmpDir";
         }
         TestDriver::dbg("Additional java parameters: 
[$additionalJavaParams].\n");
-
-        push(@pigCmd, ("-x", "local"));
     }
+    
+    push(@pigCmd, ("-x", $testCmd->{'exectype'}));
 
     if (defined($testCmd->{'java_params'}) || defined($additionalJavaParams)) {
         if (defined($testCmd->{'java_params'})) {
@@ -506,12 +501,7 @@ sub runPig
 
     # Build the command
     my @baseCmd = $self->getPigCmd($testCmd, $log);
-    if ($testCmd->{'exectype'} eq "tez") {
-        push(@baseCmd, ("-x", "tez"));
-    }
-    if ($testCmd->{'exectype'} eq "spark") {
-        push(@baseCmd, ("-x", "spark"));
-    }
+    push(@baseCmd, ("-x", $testCmd->{'exectype'}));
     my @cmd = @baseCmd;
 
     # Add option -l giving location for secondary logs

Modified: pig/branches/spark/test/e2e/pig/tests/bigdata.conf
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/pig/tests/bigdata.conf?rev=1654955&r1=1654954&r2=1654955&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/pig/tests/bigdata.conf (original)
+++ pig/branches/spark/test/e2e/pig/tests/bigdata.conf Tue Jan 27 02:27:45 2015
@@ -92,7 +92,7 @@ store c into ':OUTPATH:';\,
         ]
         },
         {
-        'name' => 'BigData_Stream',
+        'name' => 'BigData_Streaming',
         'tests' => [
             {
             'num' => 1,


Reply via email to