Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/rcfile/RCFileInputDriver.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/rcfile/RCFileInputDriver.java?rev=1152865&r1=1152864&r2=1152865&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/rcfile/RCFileInputDriver.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/rcfile/RCFileInputDriver.java Mon Aug  1 17:17:04 2011
@@ -17,6 +17,16 @@
  */
 package org.apache.hcatalog.rcfile;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Map.Entry;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -28,7 +38,12 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarStruct;
-import org.apache.hadoop.hive.serde2.objectinspector.*;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -41,10 +56,6 @@ import org.apache.hcatalog.data.schema.H
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.mapreduce.HCatInputStorageDriver;
 
-import java.io.IOException;
-import java.util.*;
-import java.util.Map.Entry;
-
 public class RCFileInputDriver extends HCatInputStorageDriver{
Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/listener/TestNotificationListener.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/listener/TestNotificationListener.java?rev=1152865&r1=1152864&r2=1152865&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/listener/TestNotificationListener.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/listener/TestNotificationListener.java Mon Aug  1 17:17:04 2011
@@ -18,25 +18,45 @@
 package org.apache.hcatalog.listener;
 
-import junit.framework.TestCase;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import javax.jms.Connection;
+import javax.jms.ConnectionFactory;
+import javax.jms.Destination;
+import javax.jms.JMSException;
+import javax.jms.MapMessage;
+import javax.jms.Message;
+import javax.jms.MessageConsumer;
+import javax.jms.MessageListener;
+import javax.jms.ObjectMessage;
+import javax.jms.Session;
+
 import org.apache.activemq.ActiveMQConnectionFactory;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.*;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionEventType;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
+import org.apache.hadoop.hive.metastore.api.UnknownTableException;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hcatalog.common.HCatConstants;
 import org.apache.thrift.TException;
-import javax.jms.*;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
+import junit.framework.TestCase;
 
 public class TestNotificationListener extends TestCase implements MessageListener{
@@ -81,7 +101,7 @@ public class TestNotificationListener ex
     super.tearDown();
   }
 
-  public void testAMQListener() throws MetaException, TException, UnknownTableException, NoSuchObjectException, 
+  public void testAMQListener() throws MetaException, TException, UnknownTableException, NoSuchObjectException,
   CommandNeedRetryException, UnknownDBException, InvalidPartitionException, UnknownPartitionException{
     driver.run("create database mydb");
     driver.run("use mydb");
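For context on the test above: a minimal, self-contained sketch of the JMS wiring TestNotificationListener exercises, using only the javax.jms and ActiveMQ types it imports. This is not code from the commit; the broker URL, topic name, and the "HCAT_EVENT" property name are illustrative assumptions, not confirmed API details.

import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.Session;

import org.apache.activemq.ActiveMQConnectionFactory;

public class NotificationConsumerSketch implements MessageListener {

  public void subscribe() throws JMSException {
    // Embedded in-VM broker, a common choice for tests (assumed URL).
    ConnectionFactory factory =
        new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
    Connection connection = factory.createConnection();
    connection.start();

    Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
    // Hypothetical topic name; the test above creates a database called "mydb".
    Destination topic = session.createTopic("mydb");
    MessageConsumer consumer = session.createConsumer(topic);
    consumer.setMessageListener(this); // onMessage() is invoked asynchronously
  }

  @Override
  public void onMessage(Message msg) {
    try {
      // Assumed: the event type travels as a string message property.
      String event = msg.getStringProperty("HCAT_EVENT");
      if (msg instanceof MapMessage) {
        // Partition events can carry key/value details as a MapMessage.
        System.out.println(event + ": " + ((MapMessage) msg).getString("colname"));
      }
    } catch (JMSException e) {
      throw new RuntimeException(e);
    }
  }
}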
Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java?rev=1152865&r1=1152864&r2=1152865&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java Mon Aug  1 17:17:04 2011
@@ -263,8 +263,8 @@ public abstract class HCatMapReduceTest
 
     job.setOutputFormatClass(HCatOutputFormat.class);
 
-    OutputJobInfo outputJobInfo = OutputJobInfo.create(dbName, tableName, partitionValues, thriftUri, null);
-    HCatOutputFormat.setOutput(job, outputJobInfo);
+    HCatTableInfo outputInfo = HCatTableInfo.getOutputTableInfo(thriftUri, null, dbName, tableName, partitionValues);
+    HCatOutputFormat.setOutput(job, outputInfo);
 
     job.setMapOutputKeyClass(BytesWritable.class);
     job.setMapOutputValueClass(DefaultHCatRecord.class);
@@ -300,8 +300,9 @@ public abstract class HCatMapReduceTest
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,filter,thriftUri,null);
-    HCatInputFormat.setInput(job, inputJobInfo);
+    HCatTableInfo inputInfo = HCatTableInfo.getInputTableInfo(
+        thriftUri, null, dbName, tableName, filter);
+    HCatInputFormat.setInput(job, inputInfo);
 
     job.setMapOutputKeyClass(BytesWritable.class);
     job.setMapOutputValueClass(Text.class);
@@ -332,8 +333,8 @@ public abstract class HCatMapReduceTest
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,null,thriftUri,null);
-    HCatInputFormat.setInput(job, inputJobInfo);
+    HCatTableInfo inputInfo = HCatTableInfo.getInputTableInfo(thriftUri, null, dbName, tableName);
+    HCatInputFormat.setInput(job, inputInfo);
 
     return HCatInputFormat.getTableSchema(job);
   }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java?rev=1152865&r1=1152864&r2=1152865&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java Mon Aug  1 17:17:04 2011
@@ -41,6 +41,12 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.mapreduce.HCatOutputCommitter;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.HCatTableInfo;
+import org.apache.hcatalog.mapreduce.InitializeInput;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
+import org.apache.hcatalog.mapreduce.StorerInfo;
 import org.apache.hcatalog.rcfile.RCFileOutputDriver;
 
 public class TestHCatOutputFormat extends TestCase {
@@ -136,19 +142,20 @@ public class TestHCatOutputFormat extend
     Map<String, String> partitionValues = new HashMap<String, String>();
     partitionValues.put("colname", "p1");
     //null server url means local mode
-    OutputJobInfo info = OutputJobInfo.create(dbName, tblName, partitionValues, null, null);
+    HCatTableInfo info = HCatTableInfo.getOutputTableInfo(null, null, dbName, tblName, partitionValues);
 
     HCatOutputFormat.setOutput(job, info);
 
     OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(job);
 
     assertNotNull(jobInfo.getTableInfo());
-    assertEquals(1, jobInfo.getPartitionValues().size());
-    assertEquals("p1", jobInfo.getPartitionValues().get("colname"));
-    assertEquals(1, jobInfo.getTableInfo().getDataColumns().getFields().size());
-    assertEquals("colname", jobInfo.getTableInfo().getDataColumns().getFields().get(0).getName());
+    assertEquals(1, jobInfo.getTableInfo().getPartitionValues().size());
+    assertEquals("p1", jobInfo.getTableInfo().getPartitionValues().get("colname"));
+    assertEquals(1, jobInfo.getTableSchema().getFields().size());
+    assertEquals("colname", jobInfo.getTableSchema().getFields().get(0).getName());
 
-    StorerInfo storer = jobInfo.getTableInfo().getStorerInfo();
+    StorerInfo storer = jobInfo.getStorerInfo();
     assertEquals(RCFileOutputDriver.class.getName(), storer.getOutputSDClass());
+
+    publishTest(job);
   }
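For reference, a minimal sketch of the job wiring implied by the HCatTableInfo-based API these diffs revert to, assembled only from calls visible in the hunks above (HCatTableInfo.getOutputTableInfo/getInputTableInfo, HCatOutputFormat.setOutput, HCatInputFormat.setInput, HCatInputFormat.getTableSchema). The class name, database/table names, and partition values are illustrative; the second argument to both factory methods is passed as null exactly as in the tests, and a null server URI selects local mode per the comment in TestHCatOutputFormat.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hcatalog.mapreduce.HCatTableInfo;

public class HCatJobSetupSketch {

  public static Job configure(Configuration conf) throws Exception {
    Job job = new Job(conf, "hcat-sketch"); // old-style mapreduce API, as in the tests

    // Read side: the 4-arg variant (no partition filter), as in HCatMapReduceTest;
    // null server URI means local (embedded metastore) mode.
    HCatTableInfo inputInfo =
        HCatTableInfo.getInputTableInfo(null, null, "mydb", "mytbl");
    job.setInputFormatClass(HCatInputFormat.class);
    HCatInputFormat.setInput(job, inputInfo);

    // The table schema can then be recovered from the configured job.
    HCatSchema schema = HCatInputFormat.getTableSchema(job);

    // Write side: target a single partition, colname=p1 (values taken from the test).
    Map<String, String> partitionValues = new HashMap<String, String>();
    partitionValues.put("colname", "p1");
    HCatTableInfo outputInfo =
        HCatTableInfo.getOutputTableInfo(null, null, "mydb", "mytbl", partitionValues);
    job.setOutputFormatClass(HCatOutputFormat.class);
    HCatOutputFormat.setOutput(job, outputInfo);

    job.setMapOutputValueClass(DefaultHCatRecord.class);
    return job;
  }
}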
