Author: gates
Date: Mon Jul 28 15:05:05 2008
New Revision: 680528

URL: http://svn.apache.org/viewvc?rev=680528&view=rev
Log:
PIG-334: Adds DoubleWritable as a type in Pig; it can be removed once Hadoop
provides its own DoubleWritable.


Added:
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/DoubleWritable.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/HDataType.java
Modified:
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    incubator/pig/branches/types/src/org/apache/pig/data/DataType.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestLogToPhyCompiler.java
    incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/Limit.gld
    incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/LimitedSort.gld
    incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/MRC17.gld

Added: incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/DoubleWritable.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/DoubleWritable.java?rev=680528&view=auto
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/DoubleWritable.java (added)
+++ incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/DoubleWritable.java Mon Jul 28 15:05:05 2008
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.pig.backend.hadoop;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+
+/**
+ * Writable for Double values.
+ */
+public class DoubleWritable implements WritableComparable {
+
+  private double value = 0.0;
+  
+  public DoubleWritable() {
+    
+  }
+  
+  public DoubleWritable(double value) {
+    set(value);
+  }
+  
+  public void readFields(DataInput in) throws IOException {
+    value = in.readDouble();
+  }
+
+  public void write(DataOutput out) throws IOException {
+    out.writeDouble(value);
+  }
+  
+  public void set(double value) { this.value = value; }
+  
+  public double get() { return value; }
+
+  /**
+   * Returns true iff <code>o</code> is a DoubleWritable with the same value.
+   */
+  public boolean equals(Object o) {
+    if (!(o instanceof DoubleWritable)) {
+      return false;
+    }
+    DoubleWritable other = (DoubleWritable)o;
+    return this.value == other.value;
+  }
+  
+  public int hashCode() {
+    return (int)Double.doubleToLongBits(value);
+  }
+  
+  public int compareTo(Object o) {
+    DoubleWritable other = (DoubleWritable)o;
+    return (value < other.value ? -1 : (value == other.value ? 0 : 1));
+  }
+  
+  public String toString() {
+    return Double.toString(value);
+  }
+
+  /** A Comparator optimized for DoubleWritable. */ 
+  public static class Comparator extends WritableComparator {
+    public Comparator() {
+      super(DoubleWritable.class);
+    }
+
+    public int compare(byte[] b1, int s1, int l1,
+                       byte[] b2, int s2, int l2) {
+      double thisValue = readDouble(b1, s1);
+      double thatValue = readDouble(b2, s2);
+      return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
+    }
+  }
+
+  static {                                        // register this comparator
+    WritableComparator.define(DoubleWritable.class, new Comparator());
+  }
+
+}
+
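
The class above follows the standard Writable contract end to end. A minimal round-trip sketch (not part of the commit; the demo class name is hypothetical) showing that serialization, equality, and ordering agree:

    import java.io.*;
    import org.apache.pig.backend.hadoop.DoubleWritable;

    public class DoubleWritableRoundTrip {
        public static void main(String[] args) throws IOException {
            DoubleWritable in = new DoubleWritable(3.14);

            // Serialize, as Hadoop does when shuffling keys from map to reduce.
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            in.write(new DataOutputStream(bytes));

            // Deserialize into a fresh instance; equals() compares the wrapped doubles.
            DoubleWritable out = new DoubleWritable();
            out.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            System.out.println(in.equals(out) && in.compareTo(out) == 0);   // true
        }
    }

The static block at the bottom of the class registers the raw-byte Comparator with WritableComparator, so Hadoop can sort serialized DoubleWritable keys without deserializing them first.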

Added: incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/HDataType.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/HDataType.java?rev=680528&view=auto
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/HDataType.java (added)
+++ incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/HDataType.java Mon Jul 28 15:05:05 2008
@@ -0,0 +1,175 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.pig.backend.hadoop;
+
+import java.util.Map;
+
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+
+import org.apache.pig.backend.executionengine.ExecException;
+import org.apache.pig.backend.hadoop.DoubleWritable;
+import org.apache.pig.data.BagFactory;
+import org.apache.pig.data.DataBag;
+import org.apache.pig.data.DataType;
+import org.apache.pig.data.DataByteArray;
+import org.apache.pig.data.Tuple;
+import org.apache.pig.data.TupleFactory;
+
+/**
+ * A class of helper methods for converting from pig data types to hadoop
+ * data types, and vice versa.
+ */
+public class HDataType {
+    static BooleanWritable boolWrit = new BooleanWritable();
+    static BytesWritable bytesWrit = new BytesWritable();
+    static Text stringWrit = new Text();
+    static FloatWritable floatWrit = new FloatWritable();
+    static DoubleWritable doubleWrit = new DoubleWritable();
+    static IntWritable intWrit = new IntWritable();
+    static LongWritable longWrit = new LongWritable();
+    static DataBag defDB = BagFactory.getInstance().newDefaultBag();
+    static Tuple defTup = TupleFactory.getInstance().newTuple();
+    static Map<Byte, String> typeToName = null;
+
+    public static WritableComparable getWritableComparableTypes(Object o) throws ExecException{
+        WritableComparable wcKey = null;
+        if (typeToName == null) typeToName = DataType.genTypeToNameMap();
+        byte type = DataType.findType(o);
+        switch (type) {
+        case DataType.BAG:
+            wcKey = (DataBag) o;
+            break;
+        case DataType.BOOLEAN:
+            boolWrit.set((Boolean)o);
+            wcKey = boolWrit;
+            break;
+        case DataType.BYTEARRAY:
+            byte[] dbaBytes = ((DataByteArray) o).get();
+            bytesWrit.set(dbaBytes,0,dbaBytes.length);
+            wcKey = bytesWrit;
+            break;
+        case DataType.CHARARRAY:
+            stringWrit.set((String) o);
+            wcKey = stringWrit;
+            break;
+        case DataType.DOUBLE:
+            doubleWrit.set((Double) o);
+            wcKey = doubleWrit;
+            break;
+        case DataType.FLOAT:
+            floatWrit.set((Float) o);
+            wcKey = floatWrit;
+            break;
+        case DataType.INTEGER:
+            intWrit.set((Integer) o);
+            wcKey = intWrit;
+            break;
+        case DataType.LONG:
+            longWrit.set((Long) o);
+            wcKey = longWrit;
+            break;
+        case DataType.TUPLE:
+            wcKey = (Tuple) o;
+            break;
+//        case DataType.MAP:
+            // Hmm, This is problematic
+            // Need a deep clone to convert a Map into
+            // MapWritable
+            // wcKey = new MapWritable();
+//            break;
+        default:
+            throw new ExecException("The type "
+                    + typeToName.get(type)
+                    + " cannot be collected as a Key type");
+        }
+        return wcKey;
+    }
+    
+    public static WritableComparable getWritableComparableTypes(byte type) throws ExecException{
+        WritableComparable wcKey = null;
+        if (typeToName == null) typeToName = DataType.genTypeToNameMap();
+         switch (type) {
+        case DataType.BAG:
+            wcKey = defDB;
+            break;
+        case DataType.BOOLEAN:
+            wcKey = boolWrit;
+            break;
+        case DataType.BYTEARRAY:
+            wcKey = bytesWrit;
+            break;
+        case DataType.CHARARRAY:
+            wcKey = stringWrit;
+            break;
+        case DataType.DOUBLE:
+            wcKey = doubleWrit;
+            break;
+        case DataType.FLOAT:
+            wcKey = floatWrit;
+            break;
+        case DataType.INTEGER:
+            wcKey = intWrit;
+            break;
+        case DataType.LONG:
+            wcKey = longWrit;
+            break;
+        case DataType.TUPLE:
+            wcKey = defTup;
+            break;
+//        case DataType.MAP:
+            // Hmm, This is problematic
+            // Need a deep clone to convert a Map into
+            // MapWritable
+            // wcKey = new MapWritable();
+//            break;
+        default:
+            throw new ExecException("The type "
+                    + typeToName.get(type)
+                    + " cannot be collected as a Key type");
+        }
+        return wcKey;
+    }
+    
+    public static Object convertToPigType(WritableComparable key) {
+        if ((key instanceof DataBag) || (key instanceof Tuple))
+            return key;
+        if (key instanceof BooleanWritable)
+            return ((BooleanWritable) key).get();
+        if (key instanceof BytesWritable) {
+            return new DataByteArray(((BytesWritable) key).get(), 0,
+                ((BytesWritable)key).getSize());
+        }
+        if (key instanceof Text)
+            return ((Text) key).toString();
+        if (key instanceof FloatWritable)
+            return ((FloatWritable) key).get();
+        if (key instanceof DoubleWritable)
+            return ((DoubleWritable) key).get();
+        if (key instanceof IntWritable)
+            return ((IntWritable) key).get();
+        if (key instanceof LongWritable)
+            return ((LongWritable) key).get();
+        return null;
+    }
+}
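
A conversion round trip through the new helper, sketched outside the commit (the demo class is hypothetical; it assumes DataType.findType maps java.lang.Double to DataType.DOUBLE, as the switch above expects):

    import org.apache.hadoop.io.WritableComparable;
    import org.apache.pig.backend.executionengine.ExecException;
    import org.apache.pig.backend.hadoop.HDataType;

    public class HDataTypeRoundTrip {
        public static void main(String[] args) throws ExecException {
            // Wrap a Pig value in the matching Hadoop key type; DOUBLE now
            // resolves to Pig's own DoubleWritable.
            WritableComparable wc = HDataType.getWritableComparableTypes(Double.valueOf(2.5));
            System.out.println(wc.getClass().getName());

            // Unwrap it again, as the reduce side does with incoming keys.
            Object pigValue = HDataType.convertToPigType(wc);
            System.out.println(pigValue);   // 2.5
        }
    }

Note that the helper hands back shared static instances (doubleWrit and friends), so each call overwrites the value returned by the previous one; callers are expected to consume a key before converting the next.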

Modified: incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java?rev=680528&r1=680527&r2=680528&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java Mon Jul 28 15:05:05 2008
@@ -43,6 +43,14 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.jobcontrol.Job;
 import org.apache.hadoop.mapred.jobcontrol.JobControl;
+
+import org.apache.pig.backend.hadoop.HDataType;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
 import org.apache.pig.data.BagFactory;
 import org.apache.pig.data.DataByteArray;
 import org.apache.pig.ComparisonFunc;
@@ -52,12 +60,6 @@
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.impl.io.FileSpec;
 import org.apache.pig.impl.plan.OperatorKey;
-import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
-import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
-import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
-import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
-import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
-import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
 import org.apache.pig.impl.util.JarManager;
 import org.apache.pig.impl.util.ObjectSerializer;
 
@@ -269,7 +271,7 @@
                 jobConf.set("pig.mapPlan", 
ObjectSerializer.serialize(mro.mapPlan));
                 jobConf.set("pig.reducePlan", 
ObjectSerializer.serialize(mro.reducePlan));
                 jobConf.set("pig.reduce.package", 
ObjectSerializer.serialize(pack));
-                Class<? extends WritableComparable> keyClass = 
DataType.getWritableComparableTypes(pack.getKeyType()).getClass();
+                Class<? extends WritableComparable> keyClass = 
HDataType.getWritableComparableTypes(pack.getKeyType()).getClass();
                 jobConf.setOutputKeyClass(keyClass);
                 selectComparator(mro, pack.getKeyType(), jobConf);
                 jobConf.setOutputValueClass(IndexedTuple.class);
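
In isolation, the key-class lookup the compiler now performs looks roughly like this (a sketch; the standalone class and the freshly constructed JobConf are stand-ins for the compiler's own):

    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.pig.backend.hadoop.HDataType;
    import org.apache.pig.data.DataType;

    public class KeyClassLookup {
        public static void main(String[] args) throws Exception {
            JobConf jobConf = new JobConf();

            // The prototype Writable for the key type doubles as a class lookup;
            // for DataType.DOUBLE this resolves to Pig's new DoubleWritable.
            Class<? extends WritableComparable> keyClass =
                HDataType.getWritableComparableTypes(DataType.DOUBLE).getClass();
            jobConf.setOutputKeyClass(keyClass);

            System.out.println(jobConf.getOutputKeyClass().getName());
        }
    }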

Modified: incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java?rev=680528&r1=680527&r2=680528&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java Mon Jul 28 15:05:05 2008
@@ -33,16 +33,18 @@
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
+
 import org.apache.pig.backend.executionengine.ExecException;
-import org.apache.pig.data.DataType;
-import org.apache.pig.data.IndexedTuple;
-import org.apache.pig.data.TargetedTuple;
-import org.apache.pig.data.Tuple;
+import org.apache.pig.backend.hadoop.HDataType;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
+import org.apache.pig.data.DataType;
+import org.apache.pig.data.IndexedTuple;
+import org.apache.pig.data.TargetedTuple;
+import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.util.ObjectSerializer;
 
 /**
@@ -129,7 +131,7 @@
             
             pigReporter.setRep(reporter);
             
-            Object k = DataType.convertToPigType(key);
+            Object k = HDataType.convertToPigType(key);
             pack.attachInput(k, indInp);
             
             try {
@@ -156,7 +158,7 @@
                             Tuple tuple = (Tuple)redRes.result;
                             Object combKey = tuple.get(0);
                             IndexedTuple it = (IndexedTuple)tuple.get(1);
-                            WritableComparable wcKey = DataType.getWritableComparableTypes(combKey);
+                            WritableComparable wcKey = HDataType.getWritableComparableTypes(combKey);
                             oc.collect(wcKey, it);
                             continue;
                         }

Modified: incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java?rev=680528&r1=680527&r2=680528&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java Mon Jul 28 15:05:05 2008
@@ -33,17 +33,19 @@
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
+
 import org.apache.pig.backend.executionengine.ExecException;
-import org.apache.pig.data.DataType;
-import org.apache.pig.data.IndexedTuple;
-import org.apache.pig.data.TargetedTuple;
-import org.apache.pig.data.Tuple;
+import org.apache.pig.backend.hadoop.HDataType;
 import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
+import org.apache.pig.data.DataType;
+import org.apache.pig.data.IndexedTuple;
+import org.apache.pig.data.TargetedTuple;
+import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.util.ObjectSerializer;
 import org.apache.pig.impl.util.SpillableMemoryManager;
 
@@ -77,7 +79,7 @@
         public void collect(OutputCollector<WritableComparable, Writable> oc, Tuple tuple) throws ExecException, IOException {
             Object key = tuple.get(0);
             IndexedTuple it = (IndexedTuple)tuple.get(1);
-            WritableComparable wcKey = DataType.getWritableComparableTypes(key);
+            WritableComparable wcKey = HDataType.getWritableComparableTypes(key);
             oc.collect(wcKey, it);
         }
     }
@@ -144,7 +146,7 @@
             
             pigReporter.setRep(reporter);
             
-            Object k = DataType.convertToPigType(key);
+            Object k = HDataType.convertToPigType(key);
             pack.attachInput(k, indInp);
             
             try {
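
Both the combiner and the reducer now funnel every key through HDataType, so one quick way to see the full type-to-Writable mapping, including the new DOUBLE entry, is a throwaway loop like this (hypothetical demo class; it assumes DataType.findTypeName returns the printable name for a type byte, per its javadoc):

    import org.apache.pig.backend.hadoop.HDataType;
    import org.apache.pig.data.DataType;

    public class KeyTypeTable {
        public static void main(String[] args) throws Exception {
            byte[] keyTypes = { DataType.BOOLEAN, DataType.INTEGER, DataType.LONG,
                                DataType.FLOAT, DataType.DOUBLE,
                                DataType.CHARARRAY, DataType.BYTEARRAY };
            for (byte t : keyTypes) {
                // e.g. "double -> org.apache.pig.backend.hadoop.DoubleWritable"
                System.out.println(DataType.findTypeName(t) + " -> "
                        + HDataType.getWritableComparableTypes(t).getClass().getName());
            }
        }
    }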

Modified: incubator/pig/branches/types/src/org/apache/pig/data/DataType.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/data/DataType.java?rev=680528&r1=680527&r2=680528&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/data/DataType.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/data/DataType.java Mon Jul 28 15:05:05 2008
@@ -130,201 +130,6 @@
         }
     }
     
-    static BooleanWritable boolWrit = new BooleanWritable();
-    static BytesWritable bytesWrit = new BytesWritable();
-    static Text stringWrit = new Text();
-    static FloatWritable floatWrit = new FloatWritable();
-    static IntWritable intWrit = new IntWritable();
-    static LongWritable longWrit = new LongWritable();
-    static DataBag defDB = BagFactory.getInstance().newDefaultBag();
-    static Tuple defTup = TupleFactory.getInstance().newTuple();
-    
-    public static WritableComparable getWritableComparableTypes(Object o) throws ExecException{
-        WritableComparable wcKey = null;
-        Map<Byte, String> typeToName = genTypeToNameMap();
-        byte type = DataType.findType(o);
-        switch (type) {
-        case DataType.BAG:
-            wcKey = (DataBag) o;
-            break;
-        case DataType.BOOLEAN:
-            boolWrit.set((Boolean)o);
-            wcKey = boolWrit;
-            break;
-        case DataType.BYTEARRAY:
-            byte[] dbaBytes = ((DataByteArray) o).get();
-            bytesWrit.set(dbaBytes,0,dbaBytes.length);
-            wcKey = bytesWrit;
-            break;
-        case DataType.CHARARRAY:
-            stringWrit.set((String) o);
-            wcKey = stringWrit;
-            break;
-        // Currently Hadoop does not have a DoubleWritable
-        // case DataType.DOUBLE:
-        case DataType.FLOAT:
-            floatWrit.set((Float) o);
-            wcKey = floatWrit;
-            break;
-        case DataType.INTEGER:
-            intWrit.set((Integer) o);
-            wcKey = intWrit;
-            break;
-        case DataType.LONG:
-            longWrit.set((Long) o);
-            wcKey = longWrit;
-            break;
-        case DataType.TUPLE:
-            wcKey = (Tuple) o;
-            break;
-//        case DataType.MAP:
-            // Hmm, This is problematic
-            // Need a deep clone to convert a Map into
-            // MapWritable
-            // wcKey = new MapWritable();
-//            break;
-        default:
-            throw new ExecException("The type "
-                    + typeToName.get(type)
-                    + " cannot be collected as a Key type");
-        }
-        return wcKey;
-    }
-    
-    public static WritableComparable getWritableComparableTypes(byte type) throws ExecException{
-        WritableComparable wcKey = null;
-        Map<Byte, String> typeToName = genTypeToNameMap();
-         switch (type) {
-        case DataType.BAG:
-            wcKey = defDB;
-            break;
-        case DataType.BOOLEAN:
-            wcKey = boolWrit;
-            break;
-        case DataType.BYTEARRAY:
-            wcKey = bytesWrit;
-            break;
-        case DataType.CHARARRAY:
-            wcKey = stringWrit;
-            break;
-        // Currently Hadoop does not have a DoubleWritable
-        // case DataType.DOUBLE:
-        case DataType.FLOAT:
-            wcKey = floatWrit;
-            break;
-        case DataType.INTEGER:
-            wcKey = intWrit;
-            break;
-        case DataType.LONG:
-            wcKey = longWrit;
-            break;
-        case DataType.TUPLE:
-            wcKey = defTup;
-            break;
-//        case DataType.MAP:
-            // Hmm, This is problematic
-            // Need a deep clone to convert a Map into
-            // MapWritable
-            // wcKey = new MapWritable();
-//            break;
-        default:
-            throw new ExecException("The type "
-                    + typeToName.get(type)
-                    + " cannot be collected as a Key type");
-        }
-        return wcKey;
-    }
-    
-    /*public static WritableComparable getWritableComparableTypes(Object o) throws ExecException{
-        WritableComparable wcKey = null;
-        Map<Byte, String> typeToName = genTypeToNameMap();
-        byte type = DataType.findType(o);
-        switch (type) {
-        case DataType.BAG:
-            wcKey = (DataBag) o;
-            break;
-        case DataType.BOOLEAN:
-            wcKey = new BooleanWritable((Boolean) o);
-            break;
-        case DataType.BYTEARRAY:
-            wcKey = new BytesWritable(((DataByteArray) o)
-                    .get());
-            break;
-        case DataType.CHARARRAY:
-            wcKey = new Text((String) o);
-            break;
-        // Currently Hadoop does not have a DoubleWritable
-        // case DataType.DOUBLE:
-        case DataType.FLOAT:
-            wcKey = new FloatWritable((Float) o);
-            break;
-        case DataType.INTEGER:
-            wcKey = new IntWritable((Integer) o);
-            break;
-        case DataType.LONG:
-            wcKey = new LongWritable((Long) o);
-            break;
-        case DataType.TUPLE:
-            wcKey = (Tuple) o;
-            break;
-        case DataType.MAP:
-            // Hmm, This is problematic
-            // Need a deep clone to convert a Map into
-            // MapWritable
-            // wcKey = new MapWritable();
-            break;
-        default:
-            throw new ExecException("The type "
-                    + typeToName.get(type)
-                    + " cannot be collected as a Key type");
-        }
-        return wcKey;
-    }
-    
-    public static WritableComparable getWritableComparableTypes(byte type) throws ExecException{
-        WritableComparable wcKey = null;
-        Map<Byte, String> typeToName = genTypeToNameMap();
-         switch (type) {
-        case DataType.BAG:
-            wcKey = DefaultBagFactory.getInstance().newDefaultBag();
-            break;
-        case DataType.BOOLEAN:
-            wcKey = new BooleanWritable();
-            break;
-        case DataType.BYTEARRAY:
-            wcKey = new BytesWritable();
-            break;
-        case DataType.CHARARRAY:
-            wcKey = new Text();
-            break;
-        // Currently Hadoop does not have a DoubleWritable
-        // case DataType.DOUBLE:
-        case DataType.FLOAT:
-            wcKey = new FloatWritable();
-            break;
-        case DataType.INTEGER:
-            wcKey = new IntWritable();
-            break;
-        case DataType.LONG:
-            wcKey = new LongWritable();
-            break;
-        case DataType.TUPLE:
-            wcKey = TupleFactory.getInstance().newTuple();
-            break;
-        case DataType.MAP:
-            // Hmm, This is problematic
-            // Need a deep clone to convert a Map into
-            // MapWritable
-            // wcKey = new MapWritable();
-            break;
-        default:
-            throw new ExecException("The type "
-                    + typeToName.get(type)
-                    + " cannot be collected as a Key type");
-        }
-        return wcKey;
-    }*/
-    
     public static int numTypes(){
         byte[] types = genAllTypes();
         return types.length;
@@ -370,26 +175,6 @@
         return findTypeName(findType(o));
     }
     
-    public static Object convertToPigType(WritableComparable key) {
-        if ((key instanceof DataBag) || (key instanceof Tuple))
-            return key;
-        if (key instanceof BooleanWritable)
-            return ((BooleanWritable) key).get();
-        if (key instanceof BytesWritable) {
-            return new DataByteArray(((BytesWritable) key).get(), 0,
-                ((BytesWritable)key).getSize());
-        }
-        if (key instanceof Text)
-            return ((Text) key).toString();
-        if (key instanceof FloatWritable)
-            return ((FloatWritable) key).get();
-        if (key instanceof IntWritable)
-            return ((IntWritable) key).get();
-        if (key instanceof LongWritable)
-            return ((LongWritable) key).get();
-        return null;
-    }
-
     /**
      * Get the type name from the type byte code
      * @param dt Type byte code

Modified: incubator/pig/branches/types/test/org/apache/pig/test/TestLogToPhyCompiler.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/test/org/apache/pig/test/TestLogToPhyCompiler.java?rev=680528&r1=680527&r2=680528&view=diff
==============================================================================
--- incubator/pig/branches/types/test/org/apache/pig/test/TestLogToPhyCompiler.java (original)
+++ incubator/pig/branches/types/test/org/apache/pig/test/TestLogToPhyCompiler.java Mon Jul 28 15:05:05 2008
@@ -501,7 +501,7 @@
         System.out.println("-------------");
 
         //System.out.println(compiledPlan.compareTo(goldenPlan)==0);
-        assertEquals(true, compiledPlan.compareTo(goldenPlan) == 0);
+        assertEquals(compiledPlan, goldenPlan);
     }
 
     @Test
@@ -525,7 +525,7 @@
         System.out.println("-------------");
 
         //System.out.println(compiledPlan.compareTo(goldenPlan)==0);
-        assertEquals(true, compiledPlan.compareTo(goldenPlan) == 0);
+        assertEquals(compiledPlan, goldenPlan);
     }
 
     

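The assertion rewrite in these two tests is a readability fix: assertEquals(expected, actual) prints both plans when they differ, while the old boolean form only reported "expected:<true> but was:<false>". A self-contained illustration (hypothetical plan strings):

    import static org.junit.Assert.assertEquals;

    public class AssertionStyleDemo {
        public static void main(String[] args) {
            String compiledPlan = "Limit - 1\n|---Load - 0";
            String goldenPlan   = "Limit - 1\n|---Load - 0";

            // Old style: a failure here would say only expected:<true> but was:<false>.
            assertEquals(true, compiledPlan.compareTo(goldenPlan) == 0);

            // New style: a failure prints both plan strings, which is what you want.
            assertEquals(compiledPlan, goldenPlan);
        }
    }
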
Modified: incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/Limit.gld
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/Limit.gld?rev=680528&r1=680527&r2=680528&view=diff
==============================================================================
--- incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/Limit.gld (original)
+++ incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/Limit.gld Mon Jul 28 15:05:05 2008
@@ -1,3 +1,3 @@
 Limit - Test-Plan-Builder-224
 |
-|---Load(a:org.apache.pig.builtin.PigStorage()) - Test-Plan-Builder-223
\ No newline at end of file
+|---Load(a:org.apache.pig.builtin.PigStorage) - Test-Plan-Builder-223
\ No newline at end of file

Modified: incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/LimitedSort.gld
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/LimitedSort.gld?rev=680528&r1=680527&r2=680528&view=diff
==============================================================================
--- incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/LimitedSort.gld (original)
+++ incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/LimitedSort.gld Mon Jul 28 15:05:05 2008
@@ -1,5 +1,5 @@
-POSort[bag] - Test-Plan-Builder-230
+POSort[bag]() - Test-Plan-Builder-230
 |   |
 |   Project[bytearray][0] - Test-Plan-Builder-229
 |
-|---Load(a:org.apache.pig.builtin.PigStorage()) - Test-Plan-Builder-228
\ No newline at end of file
+|---Load(a:org.apache.pig.builtin.PigStorage) - Test-Plan-Builder-228
\ No newline at end of file

Modified: incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/MRC17.gld
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/MRC17.gld?rev=680528&r1=680527&r2=680528&view=diff
==============================================================================
--- incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/MRC17.gld (original)
+++ incubator/pig/branches/types/test/org/apache/pig/test/data/GoldenFiles/MRC17.gld Mon Jul 28 15:05:05 2008
@@ -1,17 +1,17 @@
-MapReduce(-1) - -184:
-|   Store(DummyFil:DummyLdr) - -6079615556647418436
+MapReduce(-1) - -174:
+|   Store(DummyFil:DummyLdr) - -7856319821130535798
 |   |
-|   |---New For Each(false)[bag] - -189
+|   |---New For Each(false)[bag] - -180
 |       |   |
-|       |   Project[tuple][0] - -188
+|       |   Project[tuple][0] - -179
 |       |
-|       |---Limit - --982263781809208816
+|       |---Limit - -178
 |           |
-|           |---Package[tuple]{tuple} - -187
-|   Local Rearrange[tuple]{tuple} - -186
+|           |---Package[tuple]{tuple} - -177
+|   Local Rearrange[tuple]{tuple} - -176
 |   |   |
-|   |   Project[tuple][*] - -185
+|   |   Project[tuple][*] - -175
 |   |
-|   |---Limit - --982263781809208816
+|   |---Limit - -7398260302074824818
 |       |
-|       |---Load(DummyFil:DummyLdr) - -8219725798912083822
\ No newline at end of file
+|       |---Load(DummyFil:DummyLdr) - -4188863770717253580
\ No newline at end of file

