Author: gates
Date: Fri Dec 18 23:15:57 2009
New Revision: 892396

URL: http://svn.apache.org/viewvc?rev=892396&view=rev
Log:
PIG-1156 Add aliases to ExecJobs and PhysicalOperators.

Added:
    hadoop/pig/trunk/test/org/apache/pig/test/TestBatchAliases.java
Modified:
    hadoop/pig/trunk/CHANGES.txt
    hadoop/pig/trunk/src/org/apache/pig/backend/executionengine/ExecJob.java
    hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
    hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HJob.java
    hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java
    hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/PhysicalOperator.java
    hadoop/pig/trunk/src/org/apache/pig/backend/local/executionengine/LocalJob.java
    hadoop/pig/trunk/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld
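
A minimal usage sketch for reviewers (not part of the patch itself): with this change, each ExecJob returned from a batch run can report the alias of the relation it stored. The local exec type, input path and output location below are illustrative only, loosely mirroring the new TestBatchAliases test; the only API this commit actually adds is getAlias() on ExecJob and get/setAlias() on PhysicalOperator.

    import java.util.List;

    import org.apache.pig.ExecType;
    import org.apache.pig.PigServer;
    import org.apache.pig.backend.executionengine.ExecJob;

    public class AliasDemo {
        public static void main(String[] args) throws Exception {
            PigServer pig = new PigServer(ExecType.LOCAL);   // hypothetical: any exec type works
            pig.setBatchOn();
            pig.registerQuery("a = load 'passwd' using PigStorage(':') as (uname:chararray, uid:int);");
            pig.registerQuery("b = group a by uid;");
            pig.registerQuery("store b into '/tmp/output1';");
            List<ExecJob> jobs = pig.executeBatch();
            for (ExecJob job : jobs) {
                // getAlias() is the accessor added by this patch; it names the stored relation ("b").
                System.out.println(job.getAlias() + " -> " + job.getStatus());
            }
        }
    }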

Modified: hadoop/pig/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/CHANGES.txt?rev=892396&r1=892395&r2=892396&view=diff
==============================================================================
--- hadoop/pig/trunk/CHANGES.txt (original)
+++ hadoop/pig/trunk/CHANGES.txt Fri Dec 18 23:15:57 2009
@@ -24,6 +24,8 @@
 
 IMPROVEMENTS
 
+PIG-1156: Add aliases to ExecJobs and PhysicalOperators (dvryaboy via gates)
+
 PIG-1161: add missing license headers (dvryaboy via olgan)
 
 PIG-965: PERFORMANCE: optimize common case in matches (PORegex) (ankit.modi

Modified: 
hadoop/pig/trunk/src/org/apache/pig/backend/executionengine/ExecJob.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/executionengine/ExecJob.java?rev=892396&r1=892395&r2=892396&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/backend/executionengine/ExecJob.java 
(original)
+++ hadoop/pig/trunk/src/org/apache/pig/backend/executionengine/ExecJob.java 
Fri Dec 18 23:15:57 2009
@@ -66,6 +66,11 @@
     public Iterator<Tuple> getResults() throws ExecException;
 
     /**
+     * Returns the alias of the relation generated by this job
+     */
+    public String getAlias() throws ExecException;
+    
+    /**
      * Get configuration information
      * 
      * @return configuration information for the execution engine

Modified: 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java?rev=892396&r1=892395&r2=892396&view=diff
==============================================================================
--- 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
 (original)
+++ 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
 Fri Dec 18 23:15:57 2009
@@ -51,9 +51,11 @@
 import org.apache.pig.impl.io.FileSpec;
 import org.apache.pig.impl.logicalLayer.LogicalPlan;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.LogToPhyTranslationVisitor;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
 import org.apache.pig.impl.plan.VisitorException;
 import org.apache.pig.tools.pigstats.PigStats;
 
@@ -238,15 +240,26 @@
         MapReduceLauncher launcher = new MapReduceLauncher();
         List<ExecJob> jobs = new ArrayList<ExecJob>();
 
+        Map<String, PhysicalOperator> leafMap = new HashMap<String, PhysicalOperator>();
+        for (PhysicalOperator physOp : plan.getLeaves()) {
+            log.info(physOp);
+            if (physOp instanceof POStore) {
+                FileSpec spec = ((POStore) physOp).getSFile();
+                if (spec != null)
+                    leafMap.put(spec.toString(), physOp);
+            }
+        }
         try {
             PigStats stats = launcher.launchPig(plan, jobName, pigContext);
 
             for (FileSpec spec: launcher.getSucceededFiles()) {
-                jobs.add(new HJob(ExecJob.JOB_STATUS.COMPLETED, pigContext, spec, stats));
+                String alias = leafMap.containsKey(spec.toString()) ? leafMap.get(spec.toString()).getAlias() : null;
+                jobs.add(new HJob(ExecJob.JOB_STATUS.COMPLETED, pigContext, spec, alias, stats));
             }
 
             for (FileSpec spec: launcher.getFailedFiles()) {
-                HJob j = new HJob(ExecJob.JOB_STATUS.FAILED, pigContext, spec, stats);
+                String alias = leafMap.containsKey(spec.toString()) ? leafMap.get(spec.toString()).getAlias() : null;
+                HJob j = new HJob(ExecJob.JOB_STATUS.FAILED, pigContext, spec, alias, stats);
                 j.setException(launcher.getError(spec));
                 jobs.add(j);
             }
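
The block added above works around the fact that MapReduceLauncher only reports back output FileSpecs: before launching, the plan's POStore leaves are indexed by their output file, and after the run each returned FileSpec is mapped back to the store whose alias it carries. A compressed, hypothetical restatement of that lookup (plan, spec and the surrounding variables stand in for the real ones in HExecutionEngine):

    // Index every store leaf of the physical plan by its output FileSpec string.
    Map<String, PhysicalOperator> leafMap = new HashMap<String, PhysicalOperator>();
    for (PhysicalOperator leaf : plan.getLeaves()) {
        if (leaf instanceof POStore && ((POStore) leaf).getSFile() != null) {
            leafMap.put(((POStore) leaf).getSFile().toString(), leaf);
        }
    }
    // Later, resolve the alias for a FileSpec the launcher reported; null if no store matched.
    PhysicalOperator store = leafMap.get(spec.toString());
    String alias = (store == null) ? null : store.getAlias();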

Modified: 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HJob.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HJob.java?rev=892396&r1=892395&r2=892396&view=diff
==============================================================================
--- 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HJob.java 
(original)
+++ 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HJob.java 
Fri Dec 18 23:15:57 2009
@@ -46,23 +46,28 @@
     protected PigContext pigContext;
     protected FileSpec outFileSpec;
     protected Exception backendException;
+    protected String alias;
     private PigStats stats;
     
     public HJob(JOB_STATUS status,
                 PigContext pigContext,
-                FileSpec outFileSpec) {
+                FileSpec outFileSpec,
+                String alias) {
         this.status = status;
         this.pigContext = pigContext;
         this.outFileSpec = outFileSpec;
+        this.alias = alias;
     }
     
     public HJob(JOB_STATUS status,
             PigContext pigContext,
             FileSpec outFileSpec,
+            String alias,
             PigStats stats) {
         this.status = status;
         this.pigContext = pigContext;
         this.outFileSpec = outFileSpec;
+        this.alias = alias;
         this.stats = stats;
     }
     
@@ -170,4 +175,9 @@
     public Exception getException() {
         return backendException;
     }
+
+    @Override
+    public String getAlias() throws ExecException {
+        return alias;
+    }
 }

Modified: 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java?rev=892396&r1=892395&r2=892396&view=diff
==============================================================================
--- 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java
 (original)
+++ 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java
 Fri Dec 18 23:15:57 2009
@@ -102,6 +102,7 @@
         BinaryComparisonOperator exprOp = new GreaterThanExpr(new OperatorKey(
                 scope, nodeGen.getNextNodeId(scope)), op
                 .getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setOperandType(op.getLhsOperand().getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -133,6 +134,7 @@
         BinaryComparisonOperator exprOp = new LessThanExpr(new OperatorKey(
                 scope, nodeGen.getNextNodeId(scope)), op
                 .getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setOperandType(op.getLhsOperand().getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -163,6 +165,7 @@
         BinaryComparisonOperator exprOp = new GTOrEqualToExpr(new OperatorKey(
                 scope, nodeGen.getNextNodeId(scope)), op
                 .getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setOperandType(op.getLhsOperand().getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -192,6 +195,7 @@
         BinaryComparisonOperator exprOp = new LTOrEqualToExpr(new OperatorKey(
                 scope, nodeGen.getNextNodeId(scope)), op
                 .getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setOperandType(op.getLhsOperand().getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -221,6 +225,7 @@
         BinaryComparisonOperator exprOp = new EqualToExpr(new OperatorKey(
                 scope, nodeGen.getNextNodeId(scope)), op
                 .getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setOperandType(op.getLhsOperand().getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -250,6 +255,7 @@
         BinaryComparisonOperator exprOp = new NotEqualToExpr(new OperatorKey(
                 scope, nodeGen.getNextNodeId(scope)), op
                 .getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setOperandType(op.getLhsOperand().getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -279,6 +285,7 @@
         BinaryComparisonOperator exprOp =
             new PORegexp(new OperatorKey(scope, nodeGen.getNextNodeId(scope)),
             op.getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setLhs((ExpressionOperator)logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator)logToPhyMap.get(op.getRhsOperand()));
         LogicalPlan lp = op.getPlan();
@@ -306,6 +313,7 @@
         String scope = op.getOperatorKey().scope;
         BinaryExpressionOperator exprOp = new Add(new OperatorKey(scope,
                 nodeGen.getNextNodeId(scope)), op.getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setResultType(op.getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -334,6 +342,7 @@
         String scope = op.getOperatorKey().scope;
         BinaryExpressionOperator exprOp = new Subtract(new OperatorKey(scope,
                 nodeGen.getNextNodeId(scope)), op.getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setResultType(op.getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -362,6 +371,7 @@
         String scope = op.getOperatorKey().scope;
         BinaryExpressionOperator exprOp = new Multiply(new OperatorKey(scope,
                 nodeGen.getNextNodeId(scope)), op.getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setResultType(op.getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -390,6 +400,7 @@
         String scope = op.getOperatorKey().scope;
         BinaryExpressionOperator exprOp = new Divide(new OperatorKey(scope,
                 nodeGen.getNextNodeId(scope)), op.getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setResultType(op.getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -418,6 +429,7 @@
         String scope = op.getOperatorKey().scope;
         BinaryExpressionOperator exprOp = new Mod(new OperatorKey(scope,
                 nodeGen.getNextNodeId(scope)), op.getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setResultType(op.getType());
         exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
@@ -445,6 +457,7 @@
     public void visit(LOAnd op) throws VisitorException {
         String scope = op.getOperatorKey().scope;
         BinaryComparisonOperator exprOp = new POAnd(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), op.getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setLhs((ExpressionOperator)logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator)logToPhyMap.get(op.getRhsOperand()));
         LogicalPlan lp = op.getPlan();
@@ -470,6 +483,7 @@
     public void visit(LOOr op) throws VisitorException {
         String scope = op.getOperatorKey().scope;
         BinaryComparisonOperator exprOp = new POOr(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), op.getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setLhs((ExpressionOperator)logToPhyMap.get(op.getLhsOperand()));
         exprOp.setRhs((ExpressionOperator)logToPhyMap.get(op.getRhsOperand()));
         LogicalPlan lp = op.getPlan();
@@ -495,6 +509,7 @@
     public void visit(LONot op) throws VisitorException {
         String scope = op.getOperatorKey().scope;
         UnaryComparisonOperator exprOp = new PONot(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), op.getRequestedParallelism());
+        exprOp.setAlias(op.getAlias());
         exprOp.setExpr((ExpressionOperator)logToPhyMap.get(op.getOperand()));
         LogicalPlan lp = op.getPlan();
         
@@ -521,9 +536,10 @@
         POGlobalRearrange poGlobal = new POGlobalRearrange(new OperatorKey(
                 scope, nodeGen.getNextNodeId(scope)), cs
                 .getRequestedParallelism());
+        poGlobal.setAlias(cs.getAlias());
         POPackage poPackage = new POPackage(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), cs.getRequestedParallelism());
-
+        poPackage.setAlias(cs.getAlias());
         currentPlan.add(poGlobal);
         currentPlan.add(poPackage);
         
@@ -554,6 +570,7 @@
                 
 
                 POUserFunc gfc = new POUserFunc(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)),cs.getRequestedParallelism(), 
Arrays.asList((PhysicalOperator)ce1,(PhysicalOperator)ce2), new 
FuncSpec(GFCross.class.getName()));
+                gfc.setAlias(cs.getAlias());
                 gfc.setResultType(DataType.BAG);
                 fep1.addAsLeaf(gfc);
                 
gfc.setInputs(Arrays.asList((PhysicalOperator)ce1,(PhysicalOperator)ce2));
@@ -563,6 +580,7 @@
                 
                 PhysicalPlan fep2 = new PhysicalPlan();
                 POProject feproj = new POProject(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), cs.getRequestedParallelism());
+                feproj.setAlias(cs.getAlias());
                 feproj.setResultType(DataType.TUPLE);
                 feproj.setStar(true);
                 feproj.setOverloaded(false);
@@ -570,16 +588,19 @@
                 List<PhysicalPlan> fePlans = Arrays.asList(fep1, fep2);
                 
                 POForEach fe = new POForEach(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), cs.getRequestedParallelism(), fePlans, 
flattenLst );
+                fe.setAlias(cs.getAlias());
                 currentPlan.add(fe);
                 currentPlan.connect(logToPhyMap.get(op), fe);
                 
                 POLocalRearrange physOp = new POLocalRearrange(new OperatorKey(
                         scope, nodeGen.getNextNodeId(scope)), cs
                         .getRequestedParallelism());
+                physOp.setAlias(cs.getAlias());
                 List<PhysicalPlan> lrPlans = new ArrayList<PhysicalPlan>();
                 for(int i=0;i<inputs.size();i++){
                     PhysicalPlan lrp1 = new PhysicalPlan();
                     POProject lrproj1 = new POProject(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), cs.getRequestedParallelism(), i);
+                    lrproj1.setAlias(cs.getAlias());
                     lrproj1.setOverloaded(false);
                     lrproj1.setResultType(DataType.INTEGER);
                     lrp1.add(lrproj1);
@@ -620,6 +641,7 @@
         for(int i=1;i<=count;i++){
             PhysicalPlan fep1 = new PhysicalPlan();
             POProject feproj1 = new POProject(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), cs.getRequestedParallelism(), i);
+            feproj1.setAlias(cs.getAlias());
             feproj1.setResultType(DataType.BAG);
             feproj1.setOverloaded(false);
             fep1.add(feproj1);
@@ -628,6 +650,7 @@
         }
         
         POForEach fe = new POForEach(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), cs.getRequestedParallelism(), fePlans, 
flattenLst );
+        fe.setAlias(cs.getAlias());
         currentPlan.add(fe);
         try{
             currentPlan.connect(poPackage, fe);
@@ -658,9 +681,10 @@
         
         POGlobalRearrange poGlobal = new POGlobalRearrange(new OperatorKey(
                 scope, nodeGen.getNextNodeId(scope)), 
cg.getRequestedParallelism());
+        poGlobal.setAlias(cg.getAlias());
         POPackage poPackage = new POPackage(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), cg.getRequestedParallelism());
-
+        poPackage.setAlias(cg.getAlias());
         currentPlan.add(poGlobal);
         currentPlan.add(poPackage);
 
@@ -677,8 +701,8 @@
         for (LogicalOperator op : inputs) {
             List<LogicalPlan> plans = 
(List<LogicalPlan>)cg.getGroupByPlans().get(op);
             POLocalRearrange physOp = new POLocalRearrange(new OperatorKey(
-                    scope, nodeGen.getNextNodeId(scope)), cg
-                    .getRequestedParallelism());
+                    scope, nodeGen.getNextNodeId(scope)), 
cg.getRequestedParallelism());
+            physOp.setAlias(cg.getAlias());
             List<PhysicalPlan> exprPlans = new ArrayList<PhysicalPlan>();
             currentPlans.push(currentPlan);
             for (LogicalPlan lp : plans) {
@@ -742,7 +766,7 @@
         List<LogicalPlan> plans = (List<LogicalPlan>) 
cg.getGroupByPlans().get(op);
         POCollectedGroup physOp = new POCollectedGroup(new OperatorKey(
                 scope, nodeGen.getNextNodeId(scope)));
-        
+        physOp.setAlias(cg.getAlias());
         List<PhysicalPlan> exprPlans = new ArrayList<PhysicalPlan>();
         currentPlans.push(currentPlan);
         for (LogicalPlan lp : plans) {
@@ -836,6 +860,7 @@
                        try {
                                skj = new POSkewedJoin(new 
OperatorKey(scope,nodeGen.getNextNodeId(scope)),loj.getRequestedParallelism(),
                                                                                
        inp, loj.getInnerFlags());
+                               skj.setAlias(loj.getAlias());
                                skj.setJoinPlans(joinPlans);
                        }
                        catch (Exception e) {
@@ -923,6 +948,7 @@
                    
                    pfrj = new POFRJoin(new 
OperatorKey(scope,nodeGen.getNextNodeId(scope)),loj.getRequestedParallelism(),
                                                inp, ppLists, keyTypes, null, 
fragment, isLeftOuter, nullTuple);
+                   pfrj.setAlias(loj.getAlias());
                } catch (ExecException e1) {
                    int errCode = 2058;
                    String msg = "Unable to set index on newly create 
POLocalRearrange.";
@@ -973,9 +999,10 @@
                POGlobalRearrange poGlobal = new POGlobalRearrange(new 
OperatorKey(
                        scope, nodeGen.getNextNodeId(scope)), loj
                        .getRequestedParallelism());
+               poGlobal.setAlias(loj.getAlias());
                POPackage poPackage = new POPackage(new OperatorKey(scope, 
nodeGen
                        .getNextNodeId(scope)), loj.getRequestedParallelism());
-
+               poPackage.setAlias(loj.getAlias());
                currentPlan.add(poGlobal);
                currentPlan.add(poPackage);
                
@@ -1060,6 +1087,7 @@
                    PhysicalPlan fep1 = new PhysicalPlan();
                    POProject feproj1 = new POProject(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), 
                            loj.getRequestedParallelism(), i+1); //i+1 since 
the first column is the "group" field
+                   feproj1.setAlias(loj.getAlias());
                    feproj1.setResultType(DataType.BAG);
                    feproj1.setOverloaded(false);
                    fep1.add(feproj1);
@@ -1080,6 +1108,7 @@
                
                POForEach fe = new POForEach(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), 
                        loj.getRequestedParallelism(), fePlans, flattenLst );
+               fe.setAlias(loj.getAlias());
                currentPlan.add(fe);
                    currentPlan.connect(poPackage, fe);
                    logToPhyMap.put(loj, fe);
@@ -1155,6 +1184,7 @@
         String scope = filter.getOperatorKey().scope;
         POFilter poFilter = new POFilter(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), filter.getRequestedParallelism());
+        poFilter.setAlias(filter.getAlias());
         poFilter.setResultType(filter.getType());
         currentPlan.add(poFilter);
         logToPhyMap.put(filter, poFilter);
@@ -1197,6 +1227,7 @@
         POStream poStream = new POStream(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), stream.getExecutableManager(), 
                 stream.getStreamingCommand(), this.pc.getProperties());
+        poStream.setAlias(stream.getAlias());
         currentPlan.add(poStream);
         logToPhyMap.put(stream, poStream);
         
@@ -1232,6 +1263,7 @@
             exprOp = new POProject(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), op.getRequestedParallelism());
         }
+        exprOp.setAlias(op.getAlias());
         exprOp.setResultType(op.getType());
         exprOp.setColumns((ArrayList)op.getProjection());
         exprOp.setStar(op.isStar());
@@ -1282,6 +1314,7 @@
         POForEach poFE = new POForEach(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), g.getRequestedParallelism(), 
innerPlans,
                 g.getFlatten());
+        poFE.setAlias(g.getAlias());
         poFE.setResultType(g.getType());
         logToPhyMap.put(g, poFE);
         currentPlan.add(poFE);
@@ -1337,6 +1370,7 @@
                     .getNextNodeId(scope)), s.getRequestedParallelism(), null,
                     sortPlans, s.getAscendingCols(), comparator);
         }
+        sort.setAlias(s.getAlias());
         sort.setLimit(s.getLimit());
         // sort.setRequestedParallelism(s.getType());
         logToPhyMap.put(s, sort);
@@ -1377,6 +1411,7 @@
         // push it in the current plan and make the connections
         PhysicalOperator physOp = new PODistinct(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), op.getRequestedParallelism());
+        physOp.setAlias(op.getAlias());
         physOp.setResultType(op.getType());
         logToPhyMap.put(op, physOp);
         currentPlan.add(physOp);
@@ -1406,6 +1441,7 @@
         String scope = split.getOperatorKey().scope;
         PhysicalOperator physOp = new POSplit(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), split.getRequestedParallelism());
+        physOp.setAlias(split.getAlias());
         FileSpec splStrFile;
         try {
             splStrFile = new FileSpec(FileLocalizer.getTemporaryPath(null, 
pc).toString(),new FuncSpec(BinStorage.class.getName()));
@@ -1457,6 +1493,7 @@
         String scope = split.getOperatorKey().scope;
         PhysicalOperator physOp = new POFilter(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), split.getRequestedParallelism());
+        physOp.setAlias(split.getAlias());
         logToPhyMap.put(split, physOp);
 
         currentPlan.add(physOp);
@@ -1506,6 +1543,7 @@
                     .getNextNodeId(scope)), func.getRequestedParallelism(),
                     null, func.getFuncSpec(), (ComparisonFunc) f);
         }
+        p.setAlias(func.getAlias());
         p.setResultType(func.getType());
         currentPlan.add(p);
         List<org.apache.pig.impl.logicalLayer.ExpressionOperator> fromList = 
func.getArguments();
@@ -1530,6 +1568,7 @@
         String scope = loLoad.getOperatorKey().scope;
         POLoad load = new POLoad(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), loLoad.isSplittable());
+        load.setAlias(loLoad.getAlias());
         load.setLFile(loLoad.getInputFile());
         load.setPc(pc);
         load.setResultType(loLoad.getType());
@@ -1561,6 +1600,7 @@
         
         POStore store = new POStore(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)));
+        store.setAlias(loStore.getPlan().getPredecessors(loStore).get(0).getAlias());
         store.setSFile(loStore.getOutputFile());
         store.setInputSpec(loStore.getInputSpec());
         try {
@@ -1618,6 +1658,7 @@
         String scope = op.getOperatorKey().scope;
         ConstantExpression ce = new ConstantExpression(new OperatorKey(scope,
                 nodeGen.getNextNodeId(scope)));
+        ce.setAlias(op.getAlias());
         ce.setValue(op.getValue());
         ce.setResultType(op.getType());
         //this operator doesn't have any predecessors
@@ -1630,6 +1671,7 @@
         String scope = op.getOperatorKey().scope;
         ExpressionOperator physOp = new POBinCond(new OperatorKey(scope,
                 nodeGen.getNextNodeId(scope)), op.getRequestedParallelism());
+        physOp.setAlias(op.getAlias());
         logToPhyMap.put(op, physOp);
         POBinCond phy = (POBinCond) physOp;
         ExpressionOperator cond = (ExpressionOperator) logToPhyMap.get(op
@@ -1665,6 +1707,7 @@
         ExpressionOperator physOp = new PONegative(new OperatorKey(scope,
                 nodeGen.getNextNodeId(scope)), op.getRequestedParallelism(),
                 null);
+        physOp.setAlias(op.getAlias());
         currentPlan.add(physOp);
 
         logToPhyMap.put(op, physOp);
@@ -1697,7 +1740,7 @@
         String scope = op.getOperatorKey().scope;
         UnaryComparisonOperator physOp = new POIsNull(new OperatorKey(scope, 
nodeGen
                 .getNextNodeId(scope)), op.getRequestedParallelism(), null);
-
+        physOp.setAlias(op.getAlias());
         List<LogicalOperator> inputs = op.getPlan().getPredecessors(op); 
         ExpressionOperator from;
         
@@ -1734,6 +1777,7 @@
                 nodeGen.getNextNodeId(scope)), op.getRequestedParallelism(), op
                 .getLookUpKey());
         physOp.setResultType(op.getType());
+        physOp.setAlias(op.getAlias());
         currentPlan.add(physOp);
 
         logToPhyMap.put(op, physOp);
@@ -1755,6 +1799,7 @@
         String scope = op.getOperatorKey().scope;
         ExpressionOperator physOp = new POCast(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), op.getRequestedParallelism());
+        physOp.setAlias(op.getAlias());
         currentPlan.add(physOp);
 
         logToPhyMap.put(op, physOp);
@@ -1781,6 +1826,7 @@
             POLimit poLimit = new POLimit(new OperatorKey(scope, 
nodeGen.getNextNodeId(scope)), limit.getRequestedParallelism());
             poLimit.setResultType(limit.getType());
             poLimit.setLimit(limit.getLimit());
+            poLimit.setAlias(limit.getAlias());
             currentPlan.add(poLimit);
             logToPhyMap.put(limit, poLimit);
 
@@ -1808,6 +1854,7 @@
         String scope = op.getOperatorKey().scope;
         POUnion physOp = new POUnion(new OperatorKey(scope, nodeGen
                 .getNextNodeId(scope)), op.getRequestedParallelism());
+        physOp.setAlias(op.getAlias());
         currentPlan.add(physOp);
         physOp.setResultType(op.getType());
         logToPhyMap.put(op, physOp);

Modified: 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/PhysicalOperator.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/PhysicalOperator.java?rev=892396&r1=892395&r2=892396&view=diff
==============================================================================
--- 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/PhysicalOperator.java
 (original)
+++ 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/PhysicalOperator.java
 Fri Dec 18 23:15:57 2009
@@ -89,6 +89,10 @@
     // The result of performing the operation along with the output
     protected Result res = null;
     
+    
+    // alias associated with this PhysicalOperator
+    protected String alias = null;
+    
     // Will be used by operators to report status or transmit heartbeat
     // Should be set by the backends to appropriate implementations that
     // wrap their own version of a reporter.
@@ -161,6 +165,14 @@
         return resultType;
     }
 
+    public String getAlias() {
+        return alias;
+    }
+    
+    public void setAlias(String alias) {
+        this.alias = alias;
+    }
+    
     public void setAccumulative() {            
         accum = true;
     }

Modified: 
hadoop/pig/trunk/src/org/apache/pig/backend/local/executionengine/LocalJob.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/local/executionengine/LocalJob.java?rev=892396&r1=892395&r2=892396&view=diff
==============================================================================
--- 
hadoop/pig/trunk/src/org/apache/pig/backend/local/executionengine/LocalJob.java 
(original)
+++ 
hadoop/pig/trunk/src/org/apache/pig/backend/local/executionengine/LocalJob.java 
Fri Dec 18 23:15:57 2009
@@ -44,6 +44,7 @@
     protected JOB_STATUS status;
     protected PigContext pigContext;
     protected FileSpec outFileSpec;
+    protected String alias;
     private PigStats stats;
     
     public LocalJob(JOB_STATUS status,
@@ -162,4 +163,9 @@
     public Exception getException() {
         return null;
     }
+
+    @Override
+    public String getAlias() throws ExecException {
+        return alias;
+    }
 }

Added: hadoop/pig/trunk/test/org/apache/pig/test/TestBatchAliases.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/test/org/apache/pig/test/TestBatchAliases.java?rev=892396&view=auto
==============================================================================
--- hadoop/pig/trunk/test/org/apache/pig/test/TestBatchAliases.java (added)
+++ hadoop/pig/trunk/test/org/apache/pig/test/TestBatchAliases.java Fri Dec 18 
23:15:57 2009
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.pig.test;
+
+import java.io.IOException;
+import java.util.List;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.pig.ExecType;
+import org.apache.pig.PigServer;
+import org.apache.pig.backend.executionengine.ExecJob;
+import org.apache.pig.impl.io.FileLocalizer;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestBatchAliases extends TestCase {
+
+    private static final MiniCluster cluster = MiniCluster.buildCluster();
+
+    private PigServer myPig;
+
+    @Before
+    public void setUp() throws Exception {
+        cluster.setProperty("opt.multiquery", ""+true);
+        myPig = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
+        deleteOutputFiles();
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        myPig = null;
+    }
+    
+    @Test
+    public void testBatchAliases() {
+
+        // test case: key ('group') isn't part of foreach output
+        // and keys have the same type.
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int, gid:int);");
+            myPig.registerQuery("b = group a by uid;");
+            myPig.registerQuery("c = group a by gid;");
+            myPig.registerQuery("d = foreach b generate SUM(a.gid);");
+            myPig.registerQuery("e = foreach c generate group, COUNT(a);");
+            myPig.registerQuery("store d into '/tmp/output1';");
+            myPig.registerQuery("store e into '/tmp/output2';");
+
+            List<ExecJob> jobs = myPig.executeBatch();
+            boolean foundD=false;
+            boolean foundE=false;
+            for (ExecJob job : jobs) {
+                assertTrue(job.getStatus() == ExecJob.JOB_STATUS.COMPLETED);
+                foundD = foundD || "d".equals(job.getAlias());
+                foundE = foundE || "e".equals(job.getAlias());
+            }
+            assertTrue(foundD);
+            assertTrue(foundE);
+            
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } 
+    }
+
+    private void deleteOutputFiles() {
+        try {
+            FileLocalizer.delete("/tmp/output1", myPig.getPigContext());
+            FileLocalizer.delete("/tmp/output2", myPig.getPigContext());
+            FileLocalizer.delete("/tmp/output3", myPig.getPigContext());
+            FileLocalizer.delete("/tmp/output4", myPig.getPigContext());
+            FileLocalizer.delete("/tmp/output5", myPig.getPigContext());
+        } catch (IOException e) {
+            e.printStackTrace();
+            Assert.fail();
+        }
+    }
+}

Modified: hadoop/pig/trunk/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld?rev=892396&r1=892395&r2=892396&view=diff
==============================================================================
--- hadoop/pig/trunk/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld 
(original)
+++ hadoop/pig/trunk/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld Fri 
Dec 18 23:15:57 2009
@@ -20,4 +20,4 @@
     |   |   |
     |   |   Constant(all) - scope-127
     |   |
-    |   
|---Load(file:/tmp/input2:org.apache.pig.impl.builtin.MergeJoinIndexer('org.apache.pig.builtin.PigStorage','kmonaaafhdhcaabdgkgbhggbcohfhegjgmcoebhchcgbhjemgjhdhehiibncbnjjmhgbjnadaaabejaaaehdgjhkgfhihaaaaaaaabhhaeaaaaaaabhdhcaaeogphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccohagmgbgohdcofagihjhdgjgdgbgmfagmgbgoaaaaaaaaaaaaaaabacaaabfkaaangfgogeepggebgmgmejgohahfhehihcaacfgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcfagmgbgohlinhnlnngaiiihoacaaagemaaakgneghcgpgnefgeghgfhdheaacdemgphcghcpgbhagbgdgigfcphagjghcpgjgnhagmcphfhegjgmcpenhfgmhegjengbhadlemaaafgnelgfhjhdheaaapemgkgbhggbcphfhegjgmcpengbhadlemaaahgnemgfgbhggfhdheaabaemgkgbhggbcphfhegjgmcpemgjhdhedlemaaaegnephahdhbaahoaaafemaaaggnfcgpgphehdhbaahoaaagemaaaignfegpefgeghgfhdhbaahoaaaehihahdhcaacbgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohfhegjgmcoenhfgmhegjengbhaaaaaaaaaaaaaaaacacaaabemaaaegnengbhahbaahoaaafhihahdhcaabbgkgbh
 
ggbcohfhegjgmcoeigbhdgiengbhaafahnkmbmdbgganbadaaacegaaakgmgpgbgeeggbgdhegphcejaaajhegihcgfhdgigpgmgehihadpeaaaaaaaaaaaamhhaiaaaaaabaaaaaaaaahihdhbaahoaaakdpeaaaaaaaaaaaamhhaiaaaaaabaaaaaaaabhdhcaacegphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcelgfhjaaaaaaaaaaaaaaabacaaacekaaacgjgeemaaafhdgdgphagfheaabcemgkgbhggbcpgmgbgoghcpfdhehcgjgoghdlhihaaaaaaaaaaaaaaahiheaaafhdgdgphagfhdhcaafjgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoephagfhcgbhegphchdcofaepfahcgpgkgfgdheaaaaaaaaaaaaaaabacaaaffkaaakgphggfhcgmgpgbgegfgefkaabfhahcgpgdgfhdhdgjgoghecgbghepggfehfhagmgfhdfkaabehcgfhdhfgmhefdgjgoghgmgffehfhagmgfecgbghfkaaaehdhegbhcemaaahgdgpgmhfgngohdheaabfemgkgbhggbcphfhegjgmcpebhchcgbhjemgjhdhedlhihcaagcgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoephagfhcgbhegphchdcoefhihahcgfhdhd
 
gjgpgoephagfhcgbhegphcaaaaaaaaaaaaaaabacaaabemaaadgmgpghheaacaemgphcghcpgbhagbgdgigfcpgdgpgngngpgohdcpgmgpghghgjgoghcpemgpghdlhihcaaemgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccofagihjhdgjgdgbgmephagfhcgbhegphcaaaaaaaaaaaaaaabacaaalfkaaafgbgdgdhfgnfkaaangjgohahfheebhehegbgdgigfgeejaabehcgfhbhfgfhdhegfgefagbhcgbgmgmgfgmgjhdgnecaaakhcgfhdhfgmhefehjhagfemaaafgjgohahfheheaablemgphcghcpgbhagbgdgigfcphagjghcpgegbhegbcpfehfhagmgfdlemaaaggjgohahfhehdhbaahoaaagemaaangmgjgogfgbghgffehcgbgdgfhcheaachemgphcghcpgbhagbgdgigfcphagjghcphagfgocphfhegjgmcpemgjgogfgbghgffehcgbgdgfhcdlemaaadgmgpghhbaahoaabeemaaahgphfhehahfhehdhbaahoaaagemaaakhagbhcgfgohefagmgbgoheaafaemgphcghcpgbhagbgdgigfcphagjghcpgcgbgdglgfgogecpgigbgegpgphacpgfhigfgdhfhegjgpgogfgoghgjgogfcphagihjhdgjgdgbgmemgbhjgfhccphagmgbgohdcpfagihjhdgjgdgbgmfagmgbgodlemaaadhcgfhdheaaeeemgphcghcpgbhagbgdgigfcphagjghcpgcgbgdglgfgogecpgigbgegpgphacpgfhigfgdhfhegjgpg
 
ogfgoghgjgogfcphagihjhdgjgdgbgmemgbhjgfhccpfcgfhdhfgmhedlhihcaacbgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcaaaaaaaaaaaaaaabacaaabemaaaegnelgfhjheaacgemgphcghcpgbhagbgdgigfcphagjghcpgjgnhagmcphagmgbgocpephagfhcgbhegphcelgfhjdlhihahbaahoaaapaaaappppppppdchahahahdhcaaclgphcghcogbhagbgdgigfcogdgpgngngpgohdcogmgpghghgjgoghcogjgnhagmcoemgpghdeekemgpghghgfhccikmpnoicknfncdiacaaabemaaaegogbgngfhbaahoaaaohihaheaafjgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoephagfhcgbhegphchdcofaepfahcgpgkgfgdhehahahdhcaaecgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccofcgfhdhfgmheaaaaaaaaaaaaaaabacaaacecaaamhcgfhehfhcgofdhegbhehfhdemaaaghcgfhdhfgmheheaabcemgkgbhggbcpgmgbgoghcpepgcgkgfgdhedlhihaachahbaahoaaboaaaaaaaahdhbaahoaaaaaaaaaaabhhaeaaaaaaabhdhcaabbgkgbhggbcogmgbgoghcoejgohegfghgfhcbcockakephibihdiacaaab
 
ejaaafhggbgmhfgfhihcaabagkgbhggbcogmgbgoghcoeohfgngcgfhcigkmjfbnaljeoailacaaaahihaaaaaaaaahihihdhbaahoaaaaaaaaaaabhhaeaaaaaaakhbaahoaabmhihdhbaahoaaakdpeaaaaaaaaaaaamhhaiaaaaaabaaaaaaaabhbaahoaabmhbaahoaaaphihdhbaahoaaaaaaaaaaaahhaeaaaaaaakhihdhbaahoaaaihdhbaahoaaakdpeaaaaaaaaaaaamhhaiaaaaaabaaaaaaaaahiaahi',''))
 - scope-118
\ No newline at end of file
+    |   
|---Load(file:/tmp/input2:org.apache.pig.impl.builtin.MergeJoinIndexer('org.apache.pig.builtin.PigStorage','kmonaaafhdhcaabdgkgbhggbcohfhegjgmcoebhchcgbhjemgjhdhehiibncbnjjmhgbjnadaaabejaaaehdgjhkgfhihaaaaaaaabhhaeaaaaaaabhdhcaaeogphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccohagmgbgohdcofagihjhdgjgdgbgmfagmgbgoaaaaaaaaaaaaaaabacaaabfkaaangfgogeepggebgmgmejgohahfhehihcaacfgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcfagmgbgohlinhnlnngaiiihoacaaagemaaakgneghcgpgnefgeghgfhdheaacdemgphcghcpgbhagbgdgigfcphagjghcpgjgnhagmcphfhegjgmcpenhfgmhegjengbhadlemaaafgnelgfhjhdheaaapemgkgbhggbcphfhegjgmcpengbhadlemaaahgnemgfgbhggfhdheaabaemgkgbhggbcphfhegjgmcpemgjhdhedlemaaaegnephahdhbaahoaaafemaaaggnfcgpgphehdhbaahoaaagemaaaignfegpefgeghgfhdhbaahoaaaehihahdhcaacbgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohfhegjgmcoenhfgmhegjengbhaaaaaaaaaaaaaaaacacaaabemaaaegnengbhahbaahoaaafhihahdhcaabbgkgbh
 
ggbcohfhegjgmcoeigbhdgiengbhaafahnkmbmdbgganbadaaacegaaakgmgpgbgeeggbgdhegphcejaaajhegihcgfhdgigpgmgehihadpeaaaaaaaaaaaamhhaiaaaaaabaaaaaaaaahihdhbaahoaaakdpeaaaaaaaaaaaamhhaiaaaaaabaaaaaaaabhdhcaacegphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcelgfhjaaaaaaaaaaaaaaabacaaacekaaacgjgeemaaafhdgdgphagfheaabcemgkgbhggbcpgmgbgoghcpfdhehcgjgoghdlhihaaaaaaaaaaaaaaahiheaaafhdgdgphagfhdhcaafjgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoephagfhcgbhegphchdcofaepfahcgpgkgfgdheaaaaaaaaaaaaaaabacaaaffkaaakgphggfhcgmgpgbgegfgefkaabfhahcgpgdgfhdhdgjgoghecgbghepggfehfhagmgfhdfkaabehcgfhdhfgmhefdgjgoghgmgffehfhagmgfecgbghfkaaaehdhegbhcemaaahgdgpgmhfgngohdheaabfemgkgbhggbcphfhegjgmcpebhchcgbhjemgjhdhedlhihcaagcgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoephagfhcgbhegphchdcoefhihahcgfhdhd
 
gjgpgoephagfhcgbhegphcaaaaaaaaaaaaaaabacaaabemaaadgmgpghheaacaemgphcghcpgbhagbgdgigfcpgdgpgngngpgohdcpgmgpghghgjgoghcpemgpghdlhihcaaemgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccofagihjhdgjgdgbgmephagfhcgbhegphcaaaaaaaaaaaaaaabacaaamfkaaafgbgdgdhfgnfkaaangjgohahfheebhehegbgdgigfgeejaabehcgfhbhfgfhdhegfgefagbhcgbgmgmgfgmgjhdgnecaaakhcgfhdhfgmhefehjhagfemaaafgbgmgjgbhdhbaahoaaaoemaaafgjgohahfheheaablemgphcghcpgbhagbgdgigfcphagjghcpgegbhegbcpfehfhagmgfdlemaaaggjgohahfhehdhbaahoaaagemaaangmgjgogfgbghgffehcgbgdgfhcheaachemgphcghcpgbhagbgdgigfcphagjghcphagfgocphfhegjgmcpemgjgogfgbghgffehcgbgdgfhcdlemaaadgmgpghhbaahoaabeemaaahgphfhehahfhehdhbaahoaaagemaaakhagbhcgfgohefagmgbgoheaafaemgphcghcpgbhagbgdgigfcphagjghcpgcgbgdglgfgogecpgigbgegpgphacpgfhigfgdhfhegjgpgogfgoghgjgogfcphagihjhdgjgdgbgmemgbhjgfhccphagmgbgohdcpfagihjhdgjgdgbgmfagmgbgodlemaaadhcgfhdheaaeeemgphcghcpgbhagbgdgigfcphagjghcpgcgbgdglgfgogecpgigbg
 
egpgphacpgfhigfgdhfhegjgpgogfgoghgjgogfcphagihjhdgjgdgbgmemgbhjgfhccpfcgfhdhfgmhedlhihcaacbgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcaaaaaaaaaaaaaaabacaaabemaaaegnelgfhjheaacgemgphcghcpgbhagbgdgigfcphagjghcpgjgnhagmcphagmgbgocpephagfhcgbhegphcelgfhjdlhihahbaahoaaapaaaappppppppdchahahahahdhcaaclgphcghcogbhagbgdgigfcogdgpgngngpgohdcogmgpghghgjgoghcogjgnhagmcoemgpghdeekemgpghghgfhccikmpnoicknfncdiacaaabemaaaegogbgngfhbaahoaaaohihaheaafjgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoephagfhcgbhegphchdcofaepfahcgpgkgfgdhehahahdhcaaecgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccofcgfhdhfgmheaaaaaaaaaaaaaaabacaaacecaaamhcgfhehfhcgofdhegbhehfhdemaaaghcgfhdhfgmheheaabcemgkgbhggbcpgmgbgoghcpepgcgkgfgdhedlhihaachahbaahoaaboaaaaaaaahdhbaahoaaaaaaaaaaabhhaeaaaaaaabhdhcaabbgkgbhggbcogmgbgoghcoejgohegf
 
ghgfhcbcockakephibihdiacaaabejaaafhggbgmhfgfhihcaabagkgbhggbcogmgbgoghcoeohfgngcgfhcigkmjfbnaljeoailacaaaahihaaaaaaaaahihihdhbaahoaaaaaaaaaaabhhaeaaaaaaakhbaahoaabmhihdhbaahoaaakdpeaaaaaaaaaaaamhhaiaaaaaabaaaaaaaabhbaahoaabmhbaahoaaaphihdhbaahoaaaaaaaaaaaahhaeaaaaaaakhihdhbaahoaaaihdhbaahoaaakdpeaaaaaaaaaaaamhhaiaaaaaabaaaaaaaaahiaahi',''))
 - scope-118
\ No newline at end of file

