Author: khorgath
Date: Thu Feb 16 00:07:32 2012
New Revision: 1244798
URL: http://svn.apache.org/viewvc?rev=1244798&view=rev
Log:
Changes to HCatRecord to support switch from StorageDriver to SerDe (khorgath)
Added:
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java
Modified:
incubator/hcatalog/trunk/CHANGES.txt
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/rcfile/TestRCFileInputStorageDriver.java
Modified: incubator/hcatalog/trunk/CHANGES.txt
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/CHANGES.txt?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
--- incubator/hcatalog/trunk/CHANGES.txt (original)
+++ incubator/hcatalog/trunk/CHANGES.txt Thu Feb 16 00:07:32 2012
@@ -21,6 +21,9 @@ Apache HCatalog Change Log
Trunk (unreleased changes)
INCOMPATIBLE CHANGES
+ HCAT-241. Changes to HCatRecord to support switch from StorageDriver to
SerDe (khorgath)
+
+ HCAT-240. Changes to HCatOutputFormat to make it use SerDes instead of
StorageDriver (toffer)
NEW FEATURES
HCAT-255 Define hadoop properties on the hcat command line (ctdean via gates)
@@ -28,7 +31,6 @@ Trunk (unreleased changes)
HCAT-2 Support nested schema conversion between Hive and Pig (julienledem via
hashutosh)
IMPROVEMENTS
- HCAT-240. Changes to HCatOutputFormat to make it use SerDes instead of
StorageDriver (toffer)
HCAT-194. Better error messages for HCatalog access control errors
(julienledem via hashutosh)
Modified:
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
(original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
Thu Feb 16 00:07:32 2012
@@ -47,6 +47,8 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
@@ -58,6 +60,9 @@ import org.apache.hadoop.security.token.
import org.apache.hadoop.security.token.TokenIdentifier;
import
org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hcatalog.data.DataType;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.HCatRecordSerDe;
import org.apache.hcatalog.data.Pair;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchema;
@@ -71,7 +76,7 @@ import org.apache.thrift.TException;
public class HCatUtil {
- // static final private Log LOG = LogFactory.getLog(HCatUtil.class);
+// static final private Log LOG = LogFactory.getLog(HCatUtil.class);
public static boolean checkJobContextIfRunningFromBackend(JobContext j) {
if (j.getConfiguration().get("mapred.task.id", "").equals("")) {
@@ -395,7 +400,7 @@ public class HCatUtil {
public static void logStackTrace(Log logger) {
StackTraceElement[] stackTrace = new Exception().getStackTrace();
for (int i = 1; i < stackTrace.length; i++) {
- logger.info("\t" + stackTrace[i].toString());
+ logger.debug("\t" + stackTrace[i].toString());
}
}
@@ -412,9 +417,9 @@ public class HCatUtil {
public static void logList(Log logger, String itemName,
List<? extends Object> list) {
- logger.info(itemName + ":");
+ logger.debug(itemName + ":");
for (Object item : list) {
- logger.info("\t[" + item + "]");
+ logger.debug("\t[" + item + "]");
}
}
@@ -520,6 +525,43 @@ public class HCatUtil {
+"<databasename>.<table name> or <table name>. Got " + tableName);
}
}
+
+ public static boolean recordsEqual(HCatRecord first, HCatRecord second) {
+ return (compareRecords(first,second) == 0);
+ }
+
+ public static int compareRecords(HCatRecord first, HCatRecord second) {
+ return compareRecordContents(first.getAll(), second.getAll());
+ }
+
+ public static int compareRecordContents(List<Object> first, List<Object>
second) {
+ int mySz = first.size();
+ int urSz = second.size();
+ if(mySz != urSz) {
+ return mySz - urSz;
+ } else {
+ for (int i = 0; i < first.size(); i++) {
+ int c = DataType.compare(first.get(i), second.get(i));
+ if (c != 0) {
+ return c;
+ }
+ }
+ return 0;
+ }
+ }
+
+ public static ObjectInspector getObjectInspector(String serdeClassName,
+ Configuration conf, Properties tbl) throws Exception {
+ SerDe s = (SerDe) Class.forName(serdeClassName).newInstance();
+ s.initialize(conf, tbl);
+ return s.getObjectInspector();
+ }
+
+ public static ObjectInspector getHCatRecordObjectInspector(HCatSchema
hsch) throws Exception{
+ HCatRecordSerDe hrsd = new HCatRecordSerDe();
+ hrsd.initialize(hsch);
+ return hrsd.getObjectInspector();
+ }
public static void configureOutputStorageHandler(HCatStorageHandler
storageHandler,
JobContext context,
@@ -560,4 +602,5 @@ public class HCatUtil {
dest.set(el.getKey(),el.getValue());
}
}
+
}
Modified:
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
---
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java
(original)
+++
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java
Thu Feb 16 00:07:32 2012
@@ -25,6 +25,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatSchema;
public class DefaultHCatRecord extends HCatRecord {
@@ -92,34 +93,6 @@ public class DefaultHCatRecord extends H
}
@Override
- public int compareTo(Object that) {
-
- if(that instanceof HCatRecord) {
- HCatRecord other = (HCatRecord)that;
- int mySz = this.size();
- int urSz = other.size();
- if(mySz != urSz) {
- return mySz - urSz;
- } else{
- for (int i = 0; i < mySz;i++) {
- int c = DataType.compare(get(i), other.get(i));
- if (c != 0) {
- return c;
- }
- }
- }
- return 0;
- } else {
- return DataType.compare(this, that);
- }
- }
-
- @Override
- public boolean equals(Object other) {
- return (compareTo(other) == 0);
- }
-
- @Override
public int hashCode() {
int hash = 1;
for (Object o : contents) {
Modified:
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java
(original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java
Thu Feb 16 00:07:32 2012
@@ -22,6 +22,7 @@ import java.util.List;
import java.util.Map;
import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatSchema;
/**
@@ -134,4 +135,14 @@ public abstract class HCatRecord impleme
set(fieldName,recordSchema,value);
}
+// @Override
+// public int compareTo(Object that) {
+// return HCatUtil.compareRecords(this,(HCatRecord)that);
+// }
+//
+// @Override
+// public boolean equals(Object other) {
+// return (compareTo(other) == 0);
+// }
+
}
Modified:
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
---
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java
(original)
+++
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java
Thu Feb 16 00:07:32 2012
@@ -19,12 +19,19 @@ package org.apache.hcatalog.data;
import java.util.List;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import
org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
public class HCatRecordObjectInspector extends StandardStructObjectInspector {
+ public static final Log LOG = LogFactory
+ .getLog(HCatRecordObjectInspector.class.getName());
+
protected HCatRecordObjectInspector(List<String> structFieldNames,
List<ObjectInspector> structFieldObjectInspectors) {
super(structFieldNames, structFieldObjectInspectors);
@@ -37,7 +44,9 @@ public class HCatRecordObjectInspector e
}
int fieldID = ((MyField) fieldRef).getFieldID();
- assert (fieldID >= 0 && fieldID < fields.size());
+ if (!(fieldID >= 0 && fieldID < fields.size())){
+ throw new RuntimeException("Invalid field index ["+fieldID+"]");
+ }
return ((HCatRecord) data).get(fieldID);
}
Modified:
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
---
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java
(original)
+++
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java
Thu Feb 16 00:07:32 2012
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Writable;
import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.schema.HCatSchema;
/**
* SerDe class for serializing to and from HCatRecord
@@ -108,6 +109,18 @@ public class HCatRecordSerDe implements
cachedObjectInspector =
HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
}
+
+ public void initialize(HCatSchema hsch) throws SerDeException {
+
+ if (LOG.isDebugEnabled()){
+ LOG.debug("Initializing HCatRecordSerDe through HCatSchema" +
hsch.toString());
+ }
+
+ rowTypeInfo = (StructTypeInfo)
TypeInfoUtils.getTypeInfoFromTypeString(hsch.toString());
+ cachedObjectInspector =
HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
+
+ }
+
/**
* The purpose of a deserialize method is to turn a data blob
@@ -156,7 +169,7 @@ public class HCatRecordSerDe implements
* @param soi : StructObjectInspector
* @return HCatRecord
*/
- private List<?> serializeStruct(Object obj, StructObjectInspector soi)
+ private static List<?> serializeStruct(Object obj, StructObjectInspector soi)
throws SerDeException {
List<? extends StructField> fields = soi.getAllStructFieldRefs();
@@ -181,7 +194,7 @@ public class HCatRecordSerDe implements
* Return underlying Java Object from an object-representation
* that is readable by a provided ObjectInspector.
*/
- private Object serializeField(Object field,
+ public static Object serializeField(Object field,
ObjectInspector fieldObjectInspector) throws SerDeException {
Object res = null;
if (fieldObjectInspector.getCategory() == Category.PRIMITIVE){
@@ -193,7 +206,7 @@ public class HCatRecordSerDe implements
} else if (fieldObjectInspector.getCategory() == Category.MAP){
res = serializeMap(field,(MapObjectInspector)fieldObjectInspector);
} else {
- throw new SerDeException(getClass().toString()
+ throw new SerDeException(HCatRecordSerDe.class.toString()
+ " does not know what to do with fields of unknown category: "
+ fieldObjectInspector.getCategory() + " , type: " +
fieldObjectInspector.getTypeName());
}
@@ -205,7 +218,7 @@ public class HCatRecordSerDe implements
* an object-representation that is readable by a provided
* MapObjectInspector
*/
- private Map<?,?> serializeMap(Object f, MapObjectInspector moi) throws
SerDeException {
+ private static Map<?,?> serializeMap(Object f, MapObjectInspector moi)
throws SerDeException {
ObjectInspector koi = moi.getMapKeyObjectInspector();
ObjectInspector voi = moi.getMapValueObjectInspector();
Map<Object,Object> m = new TreeMap<Object, Object>();
@@ -221,7 +234,7 @@ public class HCatRecordSerDe implements
return m;
}
- private List<?> serializeList(Object f, ListObjectInspector loi) throws
SerDeException {
+ private static List<?> serializeList(Object f, ListObjectInspector loi)
throws SerDeException {
List l = loi.getList(f);
ObjectInspector eloi = loi.getListElementObjectInspector();
if (eloi.getCategory() == Category.PRIMITIVE){
@@ -244,7 +257,7 @@ public class HCatRecordSerDe implements
}
throw new SerDeException("HCatSerDe map type unimplemented");
} else {
- throw new SerDeException(getClass().toString()
+ throw new SerDeException(HCatRecordSerDe.class.toString()
+ " does not know what to do with fields of unknown category: "
+ eloi.getCategory() + " , type: " + eloi.getTypeName());
}
@@ -274,4 +287,5 @@ public class HCatRecordSerDe implements
return null;
}
+
}
Modified:
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
---
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java
(original)
+++
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java
Thu Feb 16 00:07:32 2012
@@ -19,17 +19,19 @@ package org.apache.hcatalog.data;
import java.util.List;
-import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hcatalog.common.HCatException;
/**
* Interface that determines whether we can implement a HCatRecord on top of it
*/
-public interface HCatRecordable extends WritableComparable<Object> {
+public interface HCatRecordable extends Writable {
/**
* Gets the field at the specified index.
* @param fieldNum the field number
* @return the object at the specified index
+ * @throws HCatException
*/
Object get(int fieldNum);
Added:
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java?rev=1244798&view=auto
==============================================================================
---
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java
(added)
+++
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java
Thu Feb 16 00:07:32 2012
@@ -0,0 +1,123 @@
+package org.apache.hcatalog.data;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.schema.HCatSchema;
+
+public class LazyHCatRecord extends HCatRecord {
+
+ public static final Log LOG = LogFactory
+ .getLog(LazyHCatRecord.class.getName());
+
+ private Object o;
+ private StructObjectInspector soi;
+ private int size;
+
+ @Override
+ public Object get(int fieldNum) {
+ try {
+ StructField fref = soi.getAllStructFieldRefs().get(fieldNum);
+ return deserialize(soi.getStructFieldData(o,
fref),fref.getFieldObjectInspector());
+ } catch (Exception e) {
+ throw new RuntimeException("SerDe Exception deserializing",e);
+ }
+ }
+
+ private Object deserialize(Object o, ObjectInspector objectInspector) throws
Exception {
+ return HCatRecordSerDe.serializeField(o, objectInspector);
+ }
+
+ @Override
+ public List<Object> getAll() {
+
+ List<Object> r = new ArrayList<Object>(this.size);
+ for (int i = 0; i < this.size; i++){
+ r.set(i, get(i));
+ }
+ return r;
+ }
+
+ @Override
+ public void set(int fieldNum, Object value) {
+ throw new RuntimeException("not allowed to run set() on LazyHCatRecord");
+ }
+
+ @Override
+ public int size() {
+ return this.size;
+ }
+
+ @Override
+ public void readFields(DataInput in) throws IOException {
+ throw new RuntimeException("LazyHCatRecord is intended to wrap"
+ + " an object/object inspector as a HCatRecord "
+ + "- it does not need to be read from DataInput.");
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ throw new RuntimeException("LazyHCatRecord is intended to wrap"
+ + " an object/object inspector as a HCatRecord "
+ + "- it does not need to be written to a DataOutput.");
+ }
+
+ @Override
+ public Object get(String fieldName, HCatSchema recordSchema)
+ throws HCatException {
+ int idx = recordSchema.getPosition(fieldName);
+ return get(idx);
+ }
+
+ @Override
+ public void set(String fieldName, HCatSchema recordSchema, Object value)
+ throws HCatException {
+ throw new RuntimeException("not allowed to run set() on LazyHCatRecord");
+ }
+
+ @Override
+ public void remove(int idx) throws HCatException {
+ throw new RuntimeException("not allowed to run remove() on
LazyHCatRecord");
+ }
+
+ @Override
+ public void copy(HCatRecord r) throws HCatException {
+ throw new RuntimeException("not allowed to run copy() on LazyHCatRecord");
+ }
+
+ public LazyHCatRecord(Object o, ObjectInspector oi) throws Exception{
+
+ if (oi.getCategory() != Category.STRUCT) {
+ throw new SerDeException(getClass().toString()
+ + " can only make a lazy hcat record from objects of struct types,
but we got: "
+ + oi.getTypeName());
+ }
+
+ this.soi = (StructObjectInspector)oi;
+ this.o = o;
+ this.size = soi.getAllStructFieldRefs().size();
+
+ }
+
+ @Override
+ public String toString(){
+ StringBuilder sb = new StringBuilder();
+ for(int i = 0; i< size ; i++) {
+ sb.append(get(i)+"\t");
+ }
+ return sb.toString();
+ }
+
+}
Modified:
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
---
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java
(original)
+++
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java
Thu Feb 16 00:07:32 2012
@@ -32,6 +32,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
@@ -63,7 +64,7 @@ public class TestDefaultHCatRecord exten
for(int i =0; i < recs.length; i++){
HCatRecord rec = new DefaultHCatRecord();
rec.readFields(inpStream);
- Assert.assertEquals(recs[i],rec);
+ Assert.assertTrue(HCatUtil.recordsEqual(recs[i],rec));
}
Assert.assertEquals(fInStream.available(), 0);
@@ -73,13 +74,13 @@ public class TestDefaultHCatRecord exten
public void testCompareTo() {
HCatRecord[] recs = getHCatRecords();
- Assert.assertEquals(recs[0].compareTo(recs[1]),0);
+ Assert.assertTrue(HCatUtil.compareRecords(recs[0],recs[1]) == 0);
}
public void testEqualsObject() {
HCatRecord[] recs = getHCatRecords();
- Assert.assertTrue(recs[0].equals(recs[1]));
+ Assert.assertTrue(HCatUtil.recordsEqual(recs[0],recs[1]));
}
private HCatRecord[] getHCatRecords(){
Modified:
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
---
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java
(original)
+++
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java
Thu Feb 16 00:07:32 2012
@@ -26,8 +26,10 @@ import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.DelimitedJSONSerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.io.Writable;
+import org.apache.hcatalog.common.HCatUtil;
import junit.framework.Assert;
import junit.framework.TestCase;
@@ -90,6 +92,8 @@ public class TestHCatRecordSerDe extends
props.put(Constants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1");
props.put(Constants.LIST_COLUMN_TYPES, typeString);
+// props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N");
+// props.put(Constants.SERIALIZATION_FORMAT, "1");
data.put(props, new DefaultHCatRecord(rlist));
return data;
@@ -112,7 +116,7 @@ public class TestHCatRecordSerDe extends
System.out.println("ONE:"+s.toString());
HCatRecord r2 = (HCatRecord) hrsd.deserialize(s);
- Assert.assertTrue(r.equals(r2));
+ Assert.assertTrue(HCatUtil.recordsEqual(r,r2));
// If it went through correctly, then s is also a HCatRecord,
// and also equal to the above, and a deepcopy, and this holds
@@ -120,13 +124,13 @@ public class TestHCatRecordSerDe extends
Writable s2 = hrsd.serialize(s, hrsd.getObjectInspector());
System.out.println("TWO:"+s2.toString());
- Assert.assertTrue(r.equals((HCatRecord)s));
- Assert.assertTrue(r.equals((HCatRecord)s2));
+ Assert.assertTrue(HCatUtil.recordsEqual(r,(HCatRecord)s));
+ Assert.assertTrue(HCatUtil.recordsEqual(r,(HCatRecord)s2));
// serialize using another serde, and read out that object repr.
LazySimpleSerDe testSD = new LazySimpleSerDe();
testSD.initialize(conf, tblProps);
-
+
Writable s3 = testSD.serialize(s, hrsd.getObjectInspector());
System.out.println("THREE:"+s3.toString());
Object o3 = testSD.deserialize(s3);
@@ -135,8 +139,19 @@ public class TestHCatRecordSerDe extends
// then serialize again using hrsd, and compare results
HCatRecord s4 = (HCatRecord) hrsd.serialize(o3,
testSD.getObjectInspector());
System.out.println("FOUR:"+s4.toString());
- Assert.assertFalse(r.equals(s4));
+
+ // Test LazyHCatRecord init and read
+ LazyHCatRecord s5 = new LazyHCatRecord(o3,testSD.getObjectInspector());
+ System.out.println("FIVE:"+s5.toString());
+
+ LazyHCatRecord s6 = new LazyHCatRecord(s4,hrsd.getObjectInspector());
+ System.out.println("SIX:"+s6.toString());
+
+ DelimitedJSONSerDe jsde = new DelimitedJSONSerDe();
+ jsde.initialize(conf,tblProps);
+ Writable s7 = jsde.serialize(s6, hrsd.getObjectInspector());
+ System.out.println("SEVEN:"+s7);
}
}
Modified:
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
---
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java
(original)
+++
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java
Thu Feb 16 00:07:32 2012
@@ -133,27 +133,28 @@ public class TestHCatDynamicPartitioned
}
//TODO 1.0 miniCluster is slow this test times out, make it work
-// public void testHCatDynamicPartitionMaxPartitions() throws Exception {
-// HiveConf hc = new HiveConf(this.getClass());
-//
-// int maxParts =
hiveConf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS);
-// System.out.println("Max partitions allowed = " + maxParts);
-//
-// IOException exc = null;
-// try {
-// generateWriteRecords(maxParts+5,maxParts+2,10);
-// runMRCreate(null,dataColumns,writeRecords,maxParts+5,false);
-// } catch(IOException e) {
-// exc = e;
-// }
-//
-// if (HCatConstants.HCAT_IS_DYNAMIC_MAX_PTN_CHECK_ENABLED){
-// assertTrue(exc != null);
-// assertTrue(exc instanceof HCatException);
-// assertEquals(ErrorType.ERROR_TOO_MANY_DYNAMIC_PTNS, ((HCatException)
exc).getErrorType());
-// }else{
-// assertTrue(exc == null);
-// runMRRead(maxParts+5);
-// }
-// }
+// renaming test to make test framework skip it
+ public void _testHCatDynamicPartitionMaxPartitions() throws Exception {
+ HiveConf hc = new HiveConf(this.getClass());
+
+ int maxParts =
hiveConf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS);
+ System.out.println("Max partitions allowed = " + maxParts);
+
+ IOException exc = null;
+ try {
+ generateWriteRecords(maxParts+5,maxParts+2,10);
+ runMRCreate(null,dataColumns,writeRecords,maxParts+5,false);
+ } catch(IOException e) {
+ exc = e;
+ }
+
+ if (HCatConstants.HCAT_IS_DYNAMIC_MAX_PTN_CHECK_ENABLED){
+ assertTrue(exc != null);
+ assertTrue(exc instanceof HCatException);
+ assertEquals(ErrorType.ERROR_TOO_MANY_DYNAMIC_PTNS, ((HCatException)
exc).getErrorType());
+ }else{
+ assertTrue(exc == null);
+ runMRRead(maxParts+5);
+ }
+ }
}
Modified:
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/rcfile/TestRCFileInputStorageDriver.java
URL:
http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/rcfile/TestRCFileInputStorageDriver.java?rev=1244798&r1=1244797&r2=1244798&view=diff
==============================================================================
---
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/rcfile/TestRCFileInputStorageDriver.java
(original)
+++
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/rcfile/TestRCFileInputStorageDriver.java
Thu Feb 16 00:07:32 2012
@@ -119,7 +119,7 @@ public class TestRCFileInputStorageDrive
Assert.assertEquals(bytesArr[j], w);
HCatRecord t = sd.convertToHCatRecord(null,w);
Assert.assertEquals(8, t.size());
- Assert.assertEquals(t,tuples[j]);
+ Assert.assertTrue(HCatUtil.recordsEqual(t,tuples[j]));
}
}
@@ -147,7 +147,7 @@ public class TestRCFileInputStorageDrive
Assert.assertEquals(w.size(), 8);
HCatRecord t = sd.convertToHCatRecord(null,w);
Assert.assertEquals(5, t.size());
- Assert.assertEquals(t,tuples[j]);
+ Assert.assertTrue(HCatUtil.recordsEqual(t,tuples[j]));
}
assertFalse(rr.nextKeyValue());
}
@@ -179,7 +179,7 @@ public class TestRCFileInputStorageDrive
Assert.assertEquals(w.size(), 8);
HCatRecord t = sd.convertToHCatRecord(null,w);
Assert.assertEquals(7, t.size());
- Assert.assertEquals(t,tuples[j]);
+ Assert.assertTrue(HCatUtil.recordsEqual(t,tuples[j]));
}
assertFalse(rr.nextKeyValue());
}