Author: khorgath
Date: Wed Sep 25 03:13:29 2013
New Revision: 1526095
URL: http://svn.apache.org/r1526095
Log:
HIVE-5274 : HCatalog package renaming backward compatibility follow-up
(Sushanth Sowmyan)
Added:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java
- copied, changed from r1526094,
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseStorageHandler.java
- copied, changed from r1526094,
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseStorageHandler.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java
- copied, changed from r1526094,
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
- copied, changed from r1526094,
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestPigHBaseStorageHandler.java
Removed:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseStorageHandler.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestPigHBaseStorageHandler.java
Modified:
hive/branches/branch-0.12/hcatalog/build-support/ant/checkstyle.xml
hive/branches/branch-0.12/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ResultConverter.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
Modified: hive/branches/branch-0.12/hcatalog/build-support/ant/checkstyle.xml
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/build-support/ant/checkstyle.xml?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
--- hive/branches/branch-0.12/hcatalog/build-support/ant/checkstyle.xml
(original)
+++ hive/branches/branch-0.12/hcatalog/build-support/ant/checkstyle.xml Wed Sep
25 03:13:29 2013
@@ -33,6 +33,7 @@
<fileset dir="${basedir}">
<exclude name="**/.*"/>
<exclude name="**/build/**"/>
+ <exclude name="**/.settings/**"/>
<exclude name=".idea/**"/>
<exclude name="historical/**"/>
<exclude name="build-support/checkstyle/apache_header.txt"/>
@@ -48,6 +49,12 @@
<exclude name="storage-handlers/hbase/metastore_db/**"/>
<exclude name="storage-handlers/hbase/partitions*"/>
<exclude name="storage-handlers/hbase/.partitions*"/>
+ <exclude name="hcatalog-pig-adapter/target/**"/>
+ <exclude name="server-extensions/target/**"/>
+ <exclude name="core/target/**"/>
+ <exclude name="webhcat/java-client/target/**"/>
+ <exclude name="storage-handlers/hbase/target/**"/>
+ <exclude name="webhcat/svr/target/**"/>
<exclude name="KEYS"/>
<exclude name="LICENSE.txt"/>
<exclude name="NOTICE.txt"/>
Modified:
hive/branches/branch-0.12/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java
(original)
+++
hive/branches/branch-0.12/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java
Wed Sep 25 03:13:29 2013
@@ -31,7 +31,7 @@ import org.apache.hadoop.mapred.OutputFo
/**
* The abstract Class HCatStorageHandler would server as the base class for all
* the storage handlers required for non-native tables in HCatalog.
- * @deprecated Use/modify {@link
org.apache.hcatalog.mapreduce.HCatStorageHandler} instead
+ * @deprecated Use/modify {@link
org.apache.hadoop.hive.ql.metadata.HiveStorageHandler} instead
*/
public abstract class HCatStorageHandler implements HiveStorageHandler {
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java
Wed Sep 25 03:13:29 2013
@@ -32,9 +32,9 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.util.Progressable;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatUtil;
-import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
public class HBaseBaseOutputFormat implements
OutputFormat<WritableComparable<?>, Put>,
HiveOutputFormat<WritableComparable<?>, Put> {
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java
Wed Sep 25 03:13:29 2013
@@ -19,7 +19,7 @@
package org.apache.hcatalog.hbase;
-import org.apache.hive.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatConstants;
/**
* Constants class for constants used in HBase storage handler.
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
Wed Sep 25 03:13:29 2013
@@ -47,7 +47,6 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import
org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
import org.apache.hadoop.hive.serde2.SerDe;
@@ -55,18 +54,19 @@ import org.apache.hadoop.mapred.InputFor
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatUtil;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.mapreduce.HCatStorageHandler;
import
org.apache.hcatalog.hbase.HBaseBulkOutputFormat.HBaseBulkOutputCommitter;
import
org.apache.hcatalog.hbase.HBaseDirectOutputFormat.HBaseDirectOutputCommitter;
import org.apache.hcatalog.hbase.snapshot.RevisionManager;
import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
import org.apache.hcatalog.hbase.snapshot.Transaction;
-import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.HCatTableInfo;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
-import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.HCatTableInfo;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
import org.apache.thrift.TBase;
import org.apache.zookeeper.ZooKeeper;
@@ -77,8 +77,13 @@ import com.google.common.util.concurrent
* This class HBaseHCatStorageHandler provides functionality to create HBase
* tables through HCatalog. The implementation is very similar to the
* HiveHBaseStorageHandler, with more details to suit HCatalog.
+ *
+ * Note : As of 0.12, this class is considered deprecated and a candidate for
future removal
+ * All new code must use the Hive HBaseStorageHandler instead
+ *
+ * @deprecated Use/modify {@link
org.apache.hadoop.hive.hbase.HBaseStorageHandler} instead
*/
-public class HBaseHCatStorageHandler extends DefaultStorageHandler implements
HiveMetaHook, Configurable {
+public class HBaseHCatStorageHandler extends HCatStorageHandler implements
HiveMetaHook, Configurable {
public final static String DEFAULT_PREFIX = "default.";
private final static String PROPERTY_INT_OUTPUT_LOCATION =
"hcat.hbase.mapreduce.intermediateOutputLocation";
@@ -448,6 +453,7 @@ public class HBaseHCatStorageHandler ext
return HBaseInputFormat.class;
}
+ @Deprecated
@Override
public Class<? extends OutputFormat> getOutputFormatClass() {
return HBaseBaseOutputFormat.class;
@@ -468,12 +474,19 @@ public class HBaseHCatStorageHandler ext
return HBaseSerDe.class;
}
+ @Deprecated
public Configuration getJobConf() {
return jobConf;
}
@Deprecated
@Override
+ public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
+ // do nothing
+ }
+
+ @Deprecated
+ @Override
public Configuration getConf() {
if (hbaseConf == null) {
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java
Wed Sep 25 03:13:29 2013
@@ -34,9 +34,9 @@ import org.apache.hadoop.mapred.InputSpl
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatUtil;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
/**
* This class HBaseInputFormat is a wrapper class of TableInputFormat in HBase.
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java
Wed Sep 25 03:13:29 2013
@@ -29,18 +29,18 @@ import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.hbase.HBaseSerDe;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatUtil;
-import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.hbase.snapshot.RevisionManager;
import org.apache.hcatalog.hbase.snapshot.RevisionManagerFactory;
import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
import org.apache.hcatalog.hbase.snapshot.Transaction;
-import org.apache.hive.hcatalog.mapreduce.HCatTableInfo;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
-import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
-import org.apache.hive.hcatalog.mapreduce.StorerInfo;
+import org.apache.hcatalog.mapreduce.HCatTableInfo;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
+import org.apache.hcatalog.mapreduce.StorerInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java
Wed Sep 25 03:13:29 2013
@@ -38,11 +38,11 @@ import org.apache.hadoop.hbase.util.Byte
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hive.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.hbase.snapshot.FamilyRevision;
import org.apache.hcatalog.hbase.snapshot.RevisionManager;
import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ResultConverter.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ResultConverter.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ResultConverter.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ResultConverter.java
Wed Sep 25 03:13:29 2013
@@ -21,7 +21,7 @@ package org.apache.hcatalog.hbase;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
-import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.HCatRecord;
import java.io.IOException;
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
Wed Sep 25 03:13:29 2013
@@ -47,13 +47,13 @@ import org.apache.hadoop.mapreduce.Mappe
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hive.hcatalog.cli.HCatDriver;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatUtil;
-import org.apache.hive.hcatalog.data.DefaultHCatRecord;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.DefaultHCatRecord;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatSchema;
import
org.apache.hcatalog.hbase.HBaseBulkOutputFormat.HBaseBulkOutputCommitter;
import
org.apache.hcatalog.hbase.TestHBaseDirectOutputFormat.MapReadAbortedTransaction;
import
org.apache.hcatalog.hbase.TestHBaseDirectOutputFormat.MapWriteAbortTransaction;
@@ -62,9 +62,9 @@ import org.apache.hcatalog.hbase.snapsho
import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
import org.apache.hcatalog.hbase.snapshot.Transaction;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
import org.junit.Test;
import org.slf4j.Logger;
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
Wed Sep 25 03:13:29 2013
@@ -48,21 +48,21 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hive.hcatalog.cli.HCatDriver;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatUtil;
-import org.apache.hive.hcatalog.data.DefaultHCatRecord;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.DefaultHCatRecord;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.hbase.snapshot.FamilyRevision;
import org.apache.hcatalog.hbase.snapshot.RevisionManager;
import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
import org.apache.hcatalog.hbase.snapshot.Transaction;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
import org.junit.Test;
import java.io.IOException;
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
Wed Sep 25 03:13:29 2013
@@ -36,8 +36,8 @@ import org.apache.hadoop.hive.conf.HiveC
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hive.hcatalog.cli.HCatDriver;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hcatalog.hbase.snapshot.RevisionManager;
import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
import org.apache.zookeeper.KeeperException.NoNodeException;
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
Wed Sep 25 03:13:29 2013
@@ -59,20 +59,20 @@ import org.apache.hadoop.mapred.RunningJ
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hive.hcatalog.cli.HCatDriver;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatException;
-import org.apache.hive.hcatalog.common.HCatUtil;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.hbase.snapshot.RevisionManager;
import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
import org.apache.hcatalog.hbase.snapshot.Transaction;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
-import org.apache.hive.hcatalog.mapreduce.PartInfo;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.PartInfo;
import org.junit.Test;
public class TestHCatHBaseInputFormat extends SkeletonHBaseTest {
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
Wed Sep 25 03:13:29 2013
@@ -34,13 +34,13 @@ import org.apache.hadoop.hive.conf.HiveC
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.mapreduce.Job;
-import org.apache.hive.hcatalog.cli.HCatDriver;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
import org.junit.Test;
public class TestSnapshots extends SkeletonHBaseTest {
Modified:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java?rev=1526095&r1=1526094&r2=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
Wed Sep 25 03:13:29 2013
@@ -33,8 +33,8 @@ import org.apache.hadoop.hive.conf.HiveC
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hive.hcatalog.cli.HCatDriver;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hcatalog.hbase.SkeletonHBaseTest;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
Added:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java?rev=1526095&view=auto
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
(added)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
Wed Sep 25 03:13:29 2013
@@ -0,0 +1,370 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hive.hcatalog.hbase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRCluster;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.ServerSocket;
+
+/**
+ * MiniCluster class composed of a number of Hadoop Minicluster implementations
+ * and other necessary daemons needed for testing (HBase, Hive MetaStore,
Zookeeper, MiniMRCluster)
+ */
+public class ManyMiniCluster {
+
+ //MR stuff
+ private boolean miniMRClusterEnabled;
+ private MiniMRCluster mrCluster;
+ private int numTaskTrackers;
+ private JobConf jobConf;
+
+ //HBase stuff
+ private boolean miniHBaseClusterEnabled;
+ private MiniHBaseCluster hbaseCluster;
+ private String hbaseRoot;
+ private Configuration hbaseConf;
+ private String hbaseDir;
+
+ //ZK Stuff
+ private boolean miniZookeeperClusterEnabled;
+ private MiniZooKeeperCluster zookeeperCluster;
+ private int zookeeperPort;
+ private String zookeeperDir;
+
+ //DFS Stuff
+ private MiniDFSCluster dfsCluster;
+
+ //Hive Stuff
+ private boolean miniHiveMetastoreEnabled;
+ private HiveConf hiveConf;
+ private HiveMetaStoreClient hiveMetaStoreClient;
+
+ private final File workDir;
+ private boolean started = false;
+
+
+ /**
+ * create a cluster instance using a builder which will expose configurable
options
+ * @param workDir working directory ManyMiniCluster will use for all of its
*Minicluster instances
+ * @return a Builder instance
+ */
+ public static Builder create(File workDir) {
+ return new Builder(workDir);
+ }
+
+ private ManyMiniCluster(Builder b) {
+ workDir = b.workDir;
+ numTaskTrackers = b.numTaskTrackers;
+ hiveConf = b.hiveConf;
+ jobConf = b.jobConf;
+ hbaseConf = b.hbaseConf;
+ miniMRClusterEnabled = b.miniMRClusterEnabled;
+ miniHBaseClusterEnabled = b.miniHBaseClusterEnabled;
+ miniHiveMetastoreEnabled = b.miniHiveMetastoreEnabled;
+ miniZookeeperClusterEnabled = b.miniZookeeperClusterEnabled;
+ }
+
+ protected synchronized void start() {
+ try {
+ if (!started) {
+ FileUtil.fullyDelete(workDir);
+ if (miniMRClusterEnabled) {
+ setupMRCluster();
+ }
+ if (miniZookeeperClusterEnabled || miniHBaseClusterEnabled) {
+ miniZookeeperClusterEnabled = true;
+ setupZookeeper();
+ }
+ if (miniHBaseClusterEnabled) {
+ setupHBaseCluster();
+ }
+ if (miniHiveMetastoreEnabled) {
+ setUpMetastore();
+ }
+ }
+ } catch (Exception e) {
+ throw new IllegalStateException("Failed to setup cluster", e);
+ }
+ }
+
+ protected synchronized void stop() {
+ if (hbaseCluster != null) {
+ HConnectionManager.deleteAllConnections(true);
+ try {
+ hbaseCluster.shutdown();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ hbaseCluster = null;
+ }
+ if (zookeeperCluster != null) {
+ try {
+ zookeeperCluster.shutdown();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ zookeeperCluster = null;
+ }
+ if (mrCluster != null) {
+ try {
+ mrCluster.shutdown();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ mrCluster = null;
+ }
+ if (dfsCluster != null) {
+ try {
+ dfsCluster.getFileSystem().close();
+ dfsCluster.shutdown();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ dfsCluster = null;
+ }
+ try {
+ FileSystem.closeAll();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ started = false;
+ }
+
+ /**
+ * @return Configuration of mini HBase cluster
+ */
+ public Configuration getHBaseConf() {
+ return HBaseConfiguration.create(hbaseConf);
+ }
+
+ /**
+ * @return Configuration of mini MR cluster
+ */
+ public Configuration getJobConf() {
+ return new Configuration(jobConf);
+ }
+
+ /**
+ * @return Configuration of the Hive Metastore; this is a standalone instance, not a daemon
+ */
+ public HiveConf getHiveConf() {
+ return new HiveConf(hiveConf);
+ }
+
+ /**
+ * @return Filesystem used by MiniMRCluster and MiniHBaseCluster
+ */
+ public FileSystem getFileSystem() {
+ try {
+ return FileSystem.get(jobConf);
+ } catch (IOException e) {
+ throw new IllegalStateException("Failed to get FileSystem", e);
+ }
+ }
+
+ /**
+ * @return Metastore client instance
+ */
+ public HiveMetaStoreClient getHiveMetaStoreClient() {
+ return hiveMetaStoreClient;
+ }
+
+ private void setupMRCluster() {
+ try {
+ final int jobTrackerPort = findFreePort();
+ final int taskTrackerPort = findFreePort();
+
+ if (jobConf == null)
+ jobConf = new JobConf();
+
+ jobConf.setInt("mapred.submit.replication", 1);
+ jobConf.set("yarn.scheduler.capacity.root.queues", "default");
+ jobConf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+ //conf.set("hadoop.job.history.location",new
File(workDir).getAbsolutePath()+"/history");
+ System.setProperty("hadoop.log.dir", new File(workDir,
"/logs").getAbsolutePath());
+
+ mrCluster = new MiniMRCluster(jobTrackerPort,
+ taskTrackerPort,
+ numTaskTrackers,
+ getFileSystem().getUri().toString(),
+ numTaskTrackers,
+ null,
+ null,
+ null,
+ jobConf);
+
+ jobConf = mrCluster.createJobConf();
+ } catch (IOException e) {
+ throw new IllegalStateException("Failed to Setup MR Cluster", e);
+ }
+ }
+
+ private void setupZookeeper() {
+ try {
+ zookeeperDir = new File(workDir, "zk").getAbsolutePath();
+ zookeeperPort = findFreePort();
+ zookeeperCluster = new MiniZooKeeperCluster();
+ zookeeperCluster.setDefaultClientPort(zookeeperPort);
+ zookeeperCluster.startup(new File(zookeeperDir));
+ } catch (Exception e) {
+ throw new IllegalStateException("Failed to Setup Zookeeper Cluster", e);
+ }
+ }
+
+ private void setupHBaseCluster() {
+ final int numRegionServers = 1;
+
+ try {
+ hbaseDir = new File(workDir, "hbase").toString();
+ hbaseDir = hbaseDir.replaceAll("\\\\", "/");
+ hbaseRoot = "file://" + hbaseDir;
+
+ if (hbaseConf == null)
+ hbaseConf = HBaseConfiguration.create();
+
+ hbaseConf.set("hbase.rootdir", hbaseRoot);
+ hbaseConf.set("hbase.master", "local");
+ hbaseConf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, zookeeperPort);
+ hbaseConf.set(HConstants.ZOOKEEPER_QUORUM, "127.0.0.1");
+ hbaseConf.setInt("hbase.master.port", findFreePort());
+ hbaseConf.setInt("hbase.master.info.port", -1);
+ hbaseConf.setInt("hbase.regionserver.port", findFreePort());
+ hbaseConf.setInt("hbase.regionserver.info.port", -1);
+
+ hbaseCluster = new MiniHBaseCluster(hbaseConf, numRegionServers);
+ hbaseConf.set("hbase.master",
hbaseCluster.getMaster().getServerName().getHostAndPort());
+ //opening the META table ensures that cluster is running
+ new HTable(hbaseConf, HConstants.META_TABLE_NAME);
+ } catch (Exception e) {
+ throw new IllegalStateException("Failed to setup HBase Cluster", e);
+ }
+ }
+
+ private void setUpMetastore() throws Exception {
+ if (hiveConf == null)
+ hiveConf = new HiveConf(this.getClass());
+
+ //The default org.apache.hadoop.hive.ql.hooks.PreExecutePrinter hook
+ //is present only in the ql/test directory
+ hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+ hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+ hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+ hiveConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
+ "jdbc:derby:" + new File(workDir + "/metastore_db") + ";create=true");
+ hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.toString(),
+ new File(workDir, "warehouse").toString());
+ //set where derby logs
+ File derbyLogFile = new File(workDir + "/derby.log");
+ derbyLogFile.createNewFile();
+ System.setProperty("derby.stream.error.file", derbyLogFile.getPath());
+
+
+// Driver driver = new Driver(hiveConf);
+// SessionState.start(new CliSessionState(hiveConf));
+
+ hiveMetaStoreClient = new HiveMetaStoreClient(hiveConf);
+ }
+
+ private static int findFreePort() throws IOException {
+ ServerSocket server = new ServerSocket(0);
+ int port = server.getLocalPort();
+ server.close();
+ return port;
+ }
+
+ public static class Builder {
+ private File workDir;
+ private int numTaskTrackers = 1;
+ private JobConf jobConf;
+ private Configuration hbaseConf;
+ private HiveConf hiveConf;
+
+ private boolean miniMRClusterEnabled = true;
+ private boolean miniHBaseClusterEnabled = true;
+ private boolean miniHiveMetastoreEnabled = true;
+ private boolean miniZookeeperClusterEnabled = true;
+
+
+ private Builder(File workDir) {
+ this.workDir = workDir;
+ }
+
+ public Builder numTaskTrackers(int num) {
+ numTaskTrackers = num;
+ return this;
+ }
+
+ public Builder jobConf(JobConf jobConf) {
+ this.jobConf = jobConf;
+ return this;
+ }
+
+ public Builder hbaseConf(Configuration hbaseConf) {
+ this.hbaseConf = hbaseConf;
+ return this;
+ }
+
+ public Builder hiveConf(HiveConf hiveConf) {
+ this.hiveConf = hiveConf;
+ return this;
+ }
+
+ public Builder miniMRClusterEnabled(boolean enabled) {
+ this.miniMRClusterEnabled = enabled;
+ return this;
+ }
+
+ public Builder miniHBaseClusterEnabled(boolean enabled) {
+ this.miniHBaseClusterEnabled = enabled;
+ return this;
+ }
+
+ public Builder miniZookeeperClusterEnabled(boolean enabled) {
+ this.miniZookeeperClusterEnabled = enabled;
+ return this;
+ }
+
+ public Builder miniHiveMetastoreEnabled(boolean enabled) {
+ this.miniHiveMetastoreEnabled = enabled;
+ return this;
+ }
+
+
+ public ManyMiniCluster build() {
+ return new ManyMiniCluster(this);
+ }
+
+ }
+}
Added:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java?rev=1526095&view=auto
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java
(added)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java
Wed Sep 25 03:13:29 2013
@@ -0,0 +1,241 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hive.hcatalog.hbase;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+/**
+ * Base class for HBase Tests which need a mini cluster instance
+ */
+public abstract class SkeletonHBaseTest {
+
+ protected static String TEST_DIR = "/tmp/build/test/data/";
+
+ protected final static String DEFAULT_CONTEXT_HANDLE = "default";
+
+ protected static Map<String, Context> contextMap = new HashMap<String,
Context>();
+ protected static Set<String> tableNames = new HashSet<String>();
+
+ /**
+ * Allow tests to alter the default MiniCluster configuration.
+ * (requires static initializer block as all setup here is static)
+ */
+ protected static Configuration testConf = null;
+
+ protected void createTable(String tableName, String[] families) {
+ try {
+ HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
+ HTableDescriptor tableDesc = new HTableDescriptor(tableName);
+ for (String family : families) {
+ HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
+ tableDesc.addFamily(columnDescriptor);
+ }
+ admin.createTable(tableDesc);
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw new IllegalStateException(e);
+ }
+
+ }
+
+ protected String newTableName(String prefix) {
+ String name = null;
+ int tries = 100;
+ do {
+ name = prefix + "_" + Math.abs(new Random().nextLong());
+ } while (tableNames.contains(name) && --tries > 0);
+ if (tableNames.contains(name))
+ throw new IllegalStateException("Couldn't find a unique table name,
tableNames size: " + tableNames.size());
+ tableNames.add(name);
+ return name;
+ }
+
+
+ /**
+ * start up an hbase cluster instance before a test suite runs
+ */
+ @BeforeClass
+ public static void setup() {
+ if (!contextMap.containsKey(getContextHandle()))
+ contextMap.put(getContextHandle(), new Context(getContextHandle()));
+
+ contextMap.get(getContextHandle()).start();
+ }
+
+ /**
+ * shut down an hbase cluster instance at the end of the test suite
+ */
+ @AfterClass
+ public static void tearDown() {
+ contextMap.get(getContextHandle()).stop();
+ }
+
+ /**
+ * override this with a different context handle if test suites are run
simultaneously
+ * and ManyMiniCluster instances shouldn't be shared
+ * @return the handle used to look up the shared Context for this test suite
+ */
+ public static String getContextHandle() {
+ return DEFAULT_CONTEXT_HANDLE;
+ }
+
+ /**
+ * @return working directory for a given test context, which normally is a
test suite
+ */
+ public String getTestDir() {
+ return contextMap.get(getContextHandle()).getTestDir();
+ }
+
+ /**
+ * @return ManyMiniCluster instance
+ */
+ public ManyMiniCluster getCluster() {
+ return contextMap.get(getContextHandle()).getCluster();
+ }
+
+ /**
+ * @return configuration of MiniHBaseCluster
+ */
+ public Configuration getHbaseConf() {
+ return contextMap.get(getContextHandle()).getHbaseConf();
+ }
+
+ /**
+ * @return configuration of MiniMRCluster
+ */
+ public Configuration getJobConf() {
+ return contextMap.get(getContextHandle()).getJobConf();
+ }
+
+ /**
+ * @return configuration of Hive Metastore
+ */
+ public HiveConf getHiveConf() {
+ return contextMap.get(getContextHandle()).getHiveConf();
+ }
+
+ /**
+ * @return filesystem used by ManyMiniCluster daemons
+ */
+ public FileSystem getFileSystem() {
+ return contextMap.get(getContextHandle()).getFileSystem();
+ }
+
+ /**
+ * class used to encapsulate a context which is normally used by
+ * a single TestSuite or across TestSuites when multi-threaded testing is
turned on
+ */
+ public static class Context {
+ protected String testDir;
+ protected ManyMiniCluster cluster;
+
+ protected Configuration hbaseConf;
+ protected Configuration jobConf;
+ protected HiveConf hiveConf;
+
+ protected FileSystem fileSystem;
+
+ protected int usageCount = 0;
+
+ public Context(String handle) {
+ try {
+ testDir = new File(TEST_DIR + "/test_" + handle + "_" + Math.abs(new
Random().nextLong()) + "/").getCanonicalPath();
+ System.out.println("Cluster work directory: " + testDir);
+ } catch (IOException e) {
+ throw new IllegalStateException("Failed to generate testDir", e);
+ }
+ }
+
+ public void start() {
+ if (usageCount++ == 0) {
+ ManyMiniCluster.Builder b = ManyMiniCluster.create(new File(testDir));
+ if (testConf != null) {
+ b.hbaseConf(HBaseConfiguration.create(testConf));
+ }
+ cluster = b.build();
+ cluster.start();
+ this.hbaseConf = cluster.getHBaseConf();
+ jobConf = cluster.getJobConf();
+ fileSystem = cluster.getFileSystem();
+ hiveConf = cluster.getHiveConf();
+ }
+ }
+
+ public void stop() {
+ if (--usageCount == 0) {
+ try {
+ cluster.stop();
+ cluster = null;
+ } finally {
+ System.out.println("Trying to cleanup: " + testDir);
+ try {
+ FileSystem fs = FileSystem.get(jobConf);
+ fs.delete(new Path(testDir), true);
+ } catch (IOException e) {
+ throw new IllegalStateException("Failed to cleanup test dir", e);
+ }
+
+ }
+ }
+ }
+
+ public String getTestDir() {
+ return testDir;
+ }
+
+ public ManyMiniCluster getCluster() {
+ return cluster;
+ }
+
+ public Configuration getHbaseConf() {
+ return hbaseConf;
+ }
+
+ public Configuration getJobConf() {
+ return jobConf;
+ }
+
+ public HiveConf getHiveConf() {
+ return hiveConf;
+ }
+
+ public FileSystem getFileSystem() {
+ return fileSystem;
+ }
+ }
+
+}
Copied:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java
(from r1526094,
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java)
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java?p2=hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java&p1=hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java&r1=1526094&r2=1526095&rev=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java
Wed Sep 25 03:13:29 2013
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.apache.hcatalog.hbase;
+package org.apache.hive.hcatalog.hbase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
Copied:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseStorageHandler.java
(from r1526094,
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseStorageHandler.java)
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseStorageHandler.java?p2=hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseStorageHandler.java&p1=hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseStorageHandler.java&r1=1526094&r2=1526095&rev=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseStorageHandler.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseStorageHandler.java
Wed Sep 25 03:13:29 2013
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.apache.hcatalog.hbase;
+package org.apache.hive.hcatalog.hbase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
Copied:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java
(from r1526094,
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java)
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java?p2=hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java&p1=hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java&r1=1526094&r2=1526095&rev=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java
Wed Sep 25 03:13:29 2013
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.apache.hcatalog.hbase;
+package org.apache.hive.hcatalog.hbase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
Copied:
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
(from r1526094,
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestPigHBaseStorageHandler.java)
URL:
http://svn.apache.org/viewvc/hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java?p2=hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java&p1=hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestPigHBaseStorageHandler.java&r1=1526094&r2=1526095&rev=1526095&view=diff
==============================================================================
---
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestPigHBaseStorageHandler.java
(original)
+++
hive/branches/branch-0.12/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
Wed Sep 25 03:13:29 2013
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.apache.hcatalog.hbase;
+package org.apache.hive.hcatalog.hbase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -176,7 +176,7 @@ public class TestPigHBaseStorageHandler
assertEquals("key",fields.get(0).alias.toLowerCase());
assertEquals( DataType.CHARARRAY,fields.get(1).type);
- assertEquals("testQualifier1".toLowerCase(),
fields.get(1).alias.toLowerCase());
+ assertEquals("testQualifier1".toLowerCase(),
fields.get(1).alias.toLowerCase());
assertEquals( DataType.INTEGER,fields.get(2).type);
assertEquals("testQualifier2".toLowerCase(),
fields.get(2).alias.toLowerCase());
@@ -266,7 +266,7 @@ public class TestPigHBaseStorageHandler
String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
+ "(key int, testqualifier1 float, testqualifier2 string) STORED BY " +
"'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
- + " WITH SERDEPROPERTIES
('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')"
+ + " WITH SERDEPROPERTIES
('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')"
+ " TBLPROPERTIES ('hbase.table.default.storage.type'='binary')";
@@ -305,10 +305,10 @@ public class TestPigHBaseStorageHandler
assertEquals("key",fields.get(0).alias.toLowerCase());
assertEquals( DataType.FLOAT,fields.get(1).type);
- assertEquals("testQualifier1".toLowerCase(),
fields.get(1).alias.toLowerCase());
+ assertEquals("testQualifier1".toLowerCase(),
fields.get(1).alias.toLowerCase());
assertEquals( DataType.CHARARRAY,fields.get(2).type);
- assertEquals("testQualifier2".toLowerCase(),
fields.get(2).alias.toLowerCase());
+ assertEquals("testQualifier2".toLowerCase(),
fields.get(2).alias.toLowerCase());
//Query the hbase table and check the key is valid and only 5 are present
Configuration conf = new Configuration(getHbaseConf());