Author: khorgath
Date: Tue Oct 11 00:17:40 2011
New Revision: 1181308
URL: http://svn.apache.org/viewvc?rev=1181308&view=rev
Log:
HCATALOG-109 HBase Storage Handler for HCatalog (avandana via khorgath)
Added:
incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java
incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
Modified:
incubator/hcatalog/trunk/CHANGES.txt
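
For context, a minimal sketch of how the new storage handler is exercised
(mirroring testTableCreateDrop in the test added below; it assumes
hive-site.xml and hbase-site.xml on the classpath point at a running
metastore and HBase cluster, and the class name CreateHBaseTableSketch is
illustrative):

    import org.apache.hadoop.hive.cli.CliSessionState;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.apache.hcatalog.cli.HCatDriver;

    public class CreateHBaseTableSketch {
        public static void main(String[] args) throws Exception {
            // Start a CLI session so HCatDriver can reach the metastore.
            SessionState.start(new CliSessionState(
                    new HiveConf(CreateHBaseTableSketch.class)));
            HCatDriver driver = new HCatDriver();

            // Register an HBase-backed table; since it is not EXTERNAL, the
            // handler also creates the HBase table with column family cf1.
            driver.run("create table test_table(key int, value string) "
                    + "STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' "
                    + "TBLPROPERTIES ("
                    + "'hcat.isd'='org.apache.hcatalog.hbase.HBaseInputStorageDriver', "
                    + "'hcat.osd'='org.apache.hcatalog.hbase.HBaseOutputStorageDriver', "
                    + "'hbase.columns.mapping'=':key,cf1:val')");
        }
    }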
Modified: incubator/hcatalog/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/CHANGES.txt?rev=1181308&r1=1181307&r2=1181308&view=diff
==============================================================================
--- incubator/hcatalog/trunk/CHANGES.txt (original)
+++ incubator/hcatalog/trunk/CHANGES.txt Tue Oct 11 00:17:40 2011
@@ -23,6 +23,8 @@ Trunk (unreleased changes)
INCOMPATIBLE CHANGES
NEW FEATURES
+ HCAT-109. HBase Storage Handler for HCatalog (avandana via khorgath)
+
HCAT-119. Output Storage Driver for HBase (Bulk) and HBaseStorageDriver
Composite class (toffer via khorgath)
HCAT-75. Input storage driver for HBase (avandana via khorgath)
Added: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java?rev=1181308&view=auto
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java (added)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java Tue Oct 11 00:17:40 2011
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.hbase;
+
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+
+/**
+ * A no-op implementation of HiveAuthorizationProvider for HBase-backed
+ * tables: every authorize() overload succeeds, so all operations are
+ * permitted.
+ */
+class HBaseAuthorizationProvider implements HiveAuthorizationProvider {
+
+ @Override
+ public Configuration getConf() {
+ return null;
+ }
+
+ @Override
+ public void setConf(Configuration conf) {
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+ * #init(org.apache.hadoop.conf.Configuration)
+ */
+ @Override
+ public void init(Configuration conf) throws HiveException {
+ }
+
+ @Override
+ public HiveAuthenticationProvider getAuthenticator() {
+ return null;
+ }
+
+ @Override
+ public void setAuthenticator(HiveAuthenticationProvider authenticator) {
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+ * #authorize(org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+ * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+ */
+ @Override
+ public void authorize(Privilege[] readRequiredPriv,
+ Privilege[] writeRequiredPriv) throws HiveException,
+ AuthorizationException {
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+ * #authorize(org.apache.hadoop.hive.metastore.api.Database,
+ * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+ * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+ */
+ @Override
+ public void authorize(Database db, Privilege[] readRequiredPriv,
+ Privilege[] writeRequiredPriv) throws HiveException,
+ AuthorizationException {
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+ * #authorize(org.apache.hadoop.hive.ql.metadata.Table,
+ * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+ * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+ */
+ @Override
+ public void authorize(Table table, Privilege[] readRequiredPriv,
+ Privilege[] writeRequiredPriv) throws HiveException,
+ AuthorizationException {
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+ * #authorize(org.apache.hadoop.hive.ql.metadata.Partition,
+ * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+ * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+ */
+ @Override
+ public void authorize(Partition part, Privilege[] readRequiredPriv,
+ Privilege[] writeRequiredPriv) throws HiveException,
+ AuthorizationException {
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+ * #authorize(org.apache.hadoop.hive.ql.metadata.Table,
+ * org.apache.hadoop.hive.ql.metadata.Partition, java.util.List,
+ * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+ * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+ */
+ @Override
+ public void authorize(Table table, Partition part, List<String> columns,
+ Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+ throws HiveException, AuthorizationException {
+ }
+
+}
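
Every authorize() overload above is intentionally a no-op, so wiring in this
provider permits all operations on HBase-backed tables. A minimal caller
sketch (hypothetical, not part of this patch; it sits in
org.apache.hcatalog.hbase because the class is package-private, and the
two-argument Table constructor is assumed from Hive's ql.metadata.Table):

    package org.apache.hcatalog.hbase;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hive.ql.metadata.Table;
    import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
    import org.apache.hadoop.hive.ql.security.authorization.Privilege;

    public class AuthSketch {
        public static void main(String[] args) throws Exception {
            HiveAuthorizationProvider auth = new HBaseAuthorizationProvider();
            auth.init(HBaseConfiguration.create());
            // Never throws AuthorizationException, whatever privileges are
            // requested: every overload is empty.
            auth.authorize(new Table("default", "test_table"),
                    new Privilege[] { Privilege.SELECT }, null);
        }
    }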
Added: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java?rev=1181308&view=auto
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (added)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java Tue Oct 11 00:17:40 2011
@@ -0,0 +1,384 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.hbase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hive.hbase.HBaseSerDe;
+import org.apache.hadoop.hive.metastore.HiveMetaHook;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.Constants;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hcatalog.mapreduce.HCatInputStorageDriver;
+import org.apache.hcatalog.mapreduce.HCatOutputStorageDriver;
+import org.apache.hcatalog.storagehandler.HCatStorageHandler;
+
+/**
+ * HBaseHCatStorageHandler provides functionality to create and manage HBase
+ * tables through HCatalog. The implementation closely follows Hive's
+ * HiveHBaseStorageHandler, adapted to HCatalog's storage-driver model.
+ */
+public class HBaseHCatStorageHandler extends HCatStorageHandler {
+
+    public static final String DEFAULT_PREFIX = "default.";
+
+ private Configuration hbaseConf;
+
+ private HBaseAdmin admin;
+
+ /*
+ * @return subclass of HCatInputStorageDriver
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler
+ * #getInputStorageDriver()
+ */
+ @Override
+ public Class<? extends HCatInputStorageDriver> getInputStorageDriver() {
+ return HBaseInputStorageDriver.class;
+ }
+
+ /*
+ * @return subclass of HCatOutputStorageDriver
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler
+ * #getOutputStorageDriver()
+ */
+ @Override
+ public Class<? extends HCatOutputStorageDriver> getOutputStorageDriver() {
+ return HBaseOutputStorageDriver.class;
+ }
+
+ /*
+ * @return instance of HiveAuthorizationProvider
+ *
+ * @throws HiveException
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler#
+ * getAuthorizationProvider()
+ */
+ @Override
+ public HiveAuthorizationProvider getAuthorizationProvider()
+ throws HiveException {
+
+        HBaseAuthorizationProvider hbaseAuth = new HBaseAuthorizationProvider();
+ hbaseAuth.init(getConf());
+ return hbaseAuth;
+ }
+
+ /*
+ * @param table
+ *
+ * @throws MetaException
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler
+ * #commitCreateTable(org.apache.hadoop.hive.metastore.api.Table)
+ */
+ @Override
+ public void commitCreateTable(Table table) throws MetaException {
+ }
+
+    /*
+     * @param tbl the table being dropped
+     *
+     * @param deleteData whether the underlying data should also be deleted
+     *
+ * @throws MetaException
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler
+ * #commitDropTable(org.apache.hadoop.hive.metastore.api.Table, boolean)
+ */
+ @Override
+ public void commitDropTable(Table tbl, boolean deleteData)
+ throws MetaException {
+ checkDeleteTable(tbl);
+
+ }
+
+ /*
+     * @param tbl the table being created
+ *
+ * @throws MetaException
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler
+ * #preCreateTable(org.apache.hadoop.hive.metastore.api.Table)
+ */
+ @Override
+ public void preCreateTable(Table tbl) throws MetaException {
+ boolean isExternal = MetaStoreUtils.isExternalTable(tbl);
+
+ hbaseConf = getConf();
+
+ if (tbl.getSd().getLocation() != null) {
+            throw new MetaException("LOCATION may not be specified for HBase.");
+ }
+
+ try {
+ String tableName = getHBaseTableName(tbl);
+            String hbaseColumnsMapping = tbl.getParameters().get(
+                    HBaseSerDe.HBASE_COLUMNS_MAPPING);
+
+            // validate the mapping before recording it in the table params
+            if (hbaseColumnsMapping == null) {
+                throw new MetaException(
+                        "No hbase.columns.mapping defined in table"
+                                + " properties.");
+            }
+
+            tbl.putToParameters(HBaseConstants.PROPERTY_COLUMN_MAPPING_KEY,
+                    hbaseColumnsMapping);
+
+ List<String> hbaseColumnFamilies = new ArrayList<String>();
+ List<String> hbaseColumnQualifiers = new ArrayList<String>();
+ List<byte[]> hbaseColumnFamiliesBytes = new ArrayList<byte[]>();
+ List<byte[]> hbaseColumnQualifiersBytes = new ArrayList<byte[]>();
+ int iKey = HBaseSerDe.parseColumnMapping(hbaseColumnsMapping,
+ hbaseColumnFamilies, hbaseColumnFamiliesBytes,
+ hbaseColumnQualifiers, hbaseColumnQualifiersBytes);
+
+ HTableDescriptor tableDesc;
+
+ if (!getHBaseAdmin().tableExists(tableName)) {
+ // if it is not an external table then create one
+ if (!isExternal) {
+ // Create the column descriptors
+ tableDesc = new HTableDescriptor(tableName);
+ Set<String> uniqueColumnFamilies = new HashSet<String>(
+ hbaseColumnFamilies);
+ uniqueColumnFamilies.remove(hbaseColumnFamilies.get(iKey));
+
+ for (String columnFamily : uniqueColumnFamilies) {
+ tableDesc.addFamily(new HColumnDescriptor(Bytes
+ .toBytes(columnFamily)));
+ }
+
+ getHBaseAdmin().createTable(tableDesc);
+ } else {
+ // an external table
+ throw new MetaException("HBase table " + tableName
+ + " doesn't exist while the table is "
+ + "declared as an external table.");
+ }
+
+ } else {
+ if (!isExternal) {
+ throw new MetaException("Table " + tableName
+ + " already exists within HBase."
+ + " Use CREATE EXTERNAL TABLE instead to"
+ + " register it in HCatalog.");
+ }
+ // make sure the schema mapping is right
+ tableDesc = getHBaseAdmin().getTableDescriptor(
+ Bytes.toBytes(tableName));
+
+ for (int i = 0; i < hbaseColumnFamilies.size(); i++) {
+ if (i == iKey) {
+ continue;
+ }
+
+                if (!tableDesc.hasFamily(hbaseColumnFamiliesBytes.get(i))) {
+                    throw new MetaException("Column Family "
+                            + hbaseColumnFamilies.get(i)
+                            + " is not defined in hbase table " + tableName);
+ }
+ }
+ }
+
+ // ensure the table is online
+ new HTable(hbaseConf, tableDesc.getName());
+ } catch (MasterNotRunningException mnre) {
+ throw new MetaException(StringUtils.stringifyException(mnre));
+ } catch (IOException ie) {
+ throw new MetaException(StringUtils.stringifyException(ie));
+ } catch (SerDeException se) {
+ throw new MetaException(StringUtils.stringifyException(se));
+ }
+
+ }
+
+ /*
+ * @param table
+ *
+ * @throws MetaException
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler
+ * #preDropTable(org.apache.hadoop.hive.metastore.api.Table)
+ */
+ @Override
+ public void preDropTable(Table table) throws MetaException {
+ }
+
+ /*
+ * @param table
+ *
+ * @throws MetaException
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler
+ * #rollbackCreateTable(org.apache.hadoop.hive.metastore.api.Table)
+ */
+ @Override
+ public void rollbackCreateTable(Table table) throws MetaException {
+ checkDeleteTable(table);
+ }
+
+ /*
+ * @param table
+ *
+ * @throws MetaException
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler
+ * #rollbackDropTable(org.apache.hadoop.hive.metastore.api.Table)
+ */
+ @Override
+ public void rollbackDropTable(Table table) throws MetaException {
+ }
+
+ /*
+ * @return instance of HiveMetaHook
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler#getMetaHook()
+ */
+ @Override
+ public HiveMetaHook getMetaHook() {
+ return this;
+ }
+
+ /*
+ * @param tableDesc
+ *
+ * @param jobProperties
+ *
+ * @see org.apache.hcatalog.storagehandler.HCatStorageHandler
+ * #configureTableJobProperties(org.apache.hadoop.hive.ql.plan.TableDesc,
+ * java.util.Map)
+ */
+ @Override
+ public void configureTableJobProperties(TableDesc tableDesc,
+ Map<String, String> jobProperties) {
+ Properties tableProperties = tableDesc.getProperties();
+
+ jobProperties.put(HBaseSerDe.HBASE_COLUMNS_MAPPING,
+ tableProperties.getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING));
+
+ String tableName = tableProperties
+ .getProperty(HBaseSerDe.HBASE_TABLE_NAME);
+ if (tableName == null) {
+ tableName = tableProperties.getProperty(Constants.META_TABLE_NAME);
+ if (tableName.startsWith(DEFAULT_PREFIX)) {
+ tableName = tableName.substring(DEFAULT_PREFIX.length());
+ }
+ }
+ jobProperties.put(HBaseSerDe.HBASE_TABLE_NAME, tableName);
+
+ }
+
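+    /**
+     * Lazily creates and caches an HBaseAdmin built from this handler's
+     * configuration.
+     */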
+ private HBaseAdmin getHBaseAdmin() throws MetaException {
+ try {
+ if (admin == null) {
+ admin = new HBaseAdmin(this.getConf());
+ }
+ return admin;
+ } catch (MasterNotRunningException mnre) {
+ throw new MetaException(StringUtils.stringifyException(mnre));
+ } catch (ZooKeeperConnectionException zkce) {
+ throw new MetaException(StringUtils.stringifyException(zkce));
+ }
+ }
+
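+    /**
+     * Resolves the HBase table name: first the hbase.table.name table
+     * property, then the same SerDe property, and finally the Hive table
+     * name itself (qualified with the database name unless it is the
+     * default database).
+     */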
+ private String getHBaseTableName(Table tbl) {
+        String tableName = tbl.getParameters().get(
+                HBaseSerDe.HBASE_TABLE_NAME);
+ if (tableName == null) {
+ tableName = tbl.getSd().getSerdeInfo().getParameters()
+ .get(HBaseSerDe.HBASE_TABLE_NAME);
+ }
+ if (tableName == null) {
+ if (tbl.getDbName().equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+ tableName = tbl.getTableName();
+ } else {
+ tableName = tbl.getDbName() + "." + tbl.getTableName();
+ }
+ }
+ return tableName;
+ }
+
+ /*
+ * @return subclass of SerDe
+ *
+ * @throws UnsupportedOperationException
+ *
+ * @see
+ * org.apache.hcatalog.storagehandler.HCatStorageHandler#getSerDeClass()
+ */
+ @Override
+ public Class<? extends SerDe> getSerDeClass()
+ throws UnsupportedOperationException {
+ return HBaseSerDe.class;
+ }
+
+ @Override
+ public Configuration getConf() {
+
+ if (hbaseConf == null) {
+ hbaseConf = HBaseConfiguration.create();
+ }
+ return hbaseConf;
+ }
+
+ @Override
+ public void setConf(Configuration conf) {
+ hbaseConf = HBaseConfiguration.create(conf);
+ }
+
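+    /**
+     * Drops the underlying HBase table for a managed (non-external) table,
+     * disabling it first if it is still enabled.
+     */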
+ private void checkDeleteTable(Table table) throws MetaException {
+ boolean isExternal = MetaStoreUtils.isExternalTable(table);
+ String tableName = getHBaseTableName(table);
+ try {
+ if (!isExternal && getHBaseAdmin().tableExists(tableName)) {
+                // we have created an HBase table, so we delete it to roll back
+ if (getHBaseAdmin().isTableEnabled(tableName)) {
+ getHBaseAdmin().disableTable(tableName);
+ }
+ getHBaseAdmin().deleteTable(tableName);
+ }
+ } catch (IOException ie) {
+ throw new MetaException(StringUtils.stringifyException(ie));
+ }
+ }
+
+}
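
A hedged sketch of the job-property plumbing implemented by
configureTableJobProperties above. No live cluster is needed, since the
method only copies table properties and strips the "default." database
prefix from the table name (JobPropertiesSketch and the property values are
illustrative):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Properties;

    import org.apache.hadoop.hive.hbase.HBaseSerDe;
    import org.apache.hadoop.hive.metastore.api.Constants;
    import org.apache.hadoop.hive.ql.plan.TableDesc;
    import org.apache.hcatalog.hbase.HBaseHCatStorageHandler;

    public class JobPropertiesSketch {
        public static void main(String[] args) {
            HBaseHCatStorageHandler handler = new HBaseHCatStorageHandler();

            // Table properties as Hive would supply them during planning.
            Properties props = new Properties();
            props.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cf1:val");
            props.setProperty(Constants.META_TABLE_NAME, "default.test_table");
            TableDesc tableDesc = new TableDesc();
            tableDesc.setProperties(props);

            Map<String, String> jobProperties = new HashMap<String, String>();
            handler.configureTableJobProperties(tableDesc, jobProperties);

            // Prints "test_table": the "default." prefix has been stripped.
            System.out.println(jobProperties.get(HBaseSerDe.HBASE_TABLE_NAME));
        }
    }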
Added: incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java?rev=1181308&view=auto
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java (added)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java Tue Oct 11 00:17:40 2011
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hcatalog.hbase;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.net.URI;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.junit.Test;
+
+public class TestHBaseHCatStorageHandler extends SkeletonHBaseTest {
+
+ private static HiveConf hcatConf;
+ private static HCatDriver hcatDriver;
+ private static Warehouse wh;
+
+ public void Initialize() throws Exception {
+
+ hcatConf = getHiveConf();
+ hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
+ HCatSemanticAnalyzer.class.getName());
+ URI fsuri = getFileSystem().getUri();
+ Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(),
+ getTestDir());
+ hcatConf.set(HiveConf.ConfVars.HADOOPFS.varname, fsuri.toString());
+ hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString());
+
+ //Add hbase properties
+
+ for (Map.Entry<String, String> el : getHbaseConf()) {
+ if (el.getKey().startsWith("hbase.")) {
+ hcatConf.set(el.getKey(), el.getValue());
+ }
+ }
+
+ SessionState.start(new CliSessionState(hcatConf));
+ hcatDriver = new HCatDriver();
+
+ }
+
+ @Test
+ public void testTableCreateDrop() throws Exception {
+ Initialize();
+
+ hcatDriver.run("drop table test_table");
+ CommandProcessorResponse response = hcatDriver
+                .run("create table test_table(key int, value string) STORED BY " +
+                        "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+                        " TBLPROPERTIES ('hcat.isd'='org.apache.hcatalog.hbase.HBaseInputStorageDriver', " +
+                        "'hcat.osd'='org.apache.hcatalog.hbase.HBaseOutputStorageDriver'," +
+                        "'hbase.columns.mapping'=':key,cf1:val')");
+
+ assertEquals(0, response.getResponseCode());
+
+ HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
+ boolean doesTableExist = hAdmin.tableExists("test_table");
+
+ assertTrue(doesTableExist);
+
+ hcatDriver.run("drop table test_table");
+ doesTableExist = hAdmin.tableExists("test_table");
+
+        assertFalse(doesTableExist);
+
+ }
+
+ @Test
+ public void testTableDropNonExistent() throws Exception {
+ Initialize();
+
+ hcatDriver.run("drop table mytable");
+ CommandProcessorResponse response = hcatDriver
+ .run("create table mytable(key int, value string) STORED BY " +
+                        "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+                        " TBLPROPERTIES ('hcat.isd'='org.apache.hcatalog.hbase.HBaseInputStorageDriver', " +
+                        "'hcat.osd'='org.apache.hcatalog.hbase.HBaseOutputStorageDriver'," +
+                        "'hbase.columns.mapping'=':key,cf1:val')");
+
+ assertEquals(0, response.getResponseCode());
+
+ HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
+ boolean doesTableExist = hAdmin.tableExists("mytable");
+ assertTrue(doesTableExist);
+
+ //Now delete the table from hbase
+ if (hAdmin.isTableEnabled("mytable")) {
+ hAdmin.disableTable("mytable");
+ }
+ hAdmin.deleteTable("mytable");
+ doesTableExist = hAdmin.tableExists("mytable");
+        assertFalse(doesTableExist);
+
+        CommandProcessorResponse responseTwo = hcatDriver
+                .run("drop table mytable");
+        assertEquals(0, responseTwo.getResponseCode());
+
+ }
+
+ @Test
+ public void testTableCreateExternal() throws Exception {
+
+        Initialize();
+
+        String tableName = "testTable";
+ HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
+
+ HTableDescriptor tableDesc = new HTableDescriptor(tableName);
+ tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes("key")));
+ tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes("familyone")));
+ tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes("familytwo")));
+
+ hAdmin.createTable(tableDesc);
+ boolean doesTableExist = hAdmin.tableExists(tableName);
+ assertTrue(doesTableExist);
+
+ hcatDriver.run("drop table mytable");
+ CommandProcessorResponse response = hcatDriver
+                .run("create external table mytable(key int, valueone string, valuetwo string) STORED BY " +
+                        "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+                        " TBLPROPERTIES ('hcat.isd'='org.apache.hcatalog.hbase.HBaseInputStorageDriver', " +
+                        "'hcat.osd'='org.apache.hcatalog.hbase.HBaseOutputStorageDriver'," +
+                        "'hbase.columns.mapping'=':key,familyone:val,familytwo:val'," +
+                        "'hbase.table.name'='testTable')");
+
+ assertEquals(0, response.getResponseCode());
+
+ }
+
+
+}
\ No newline at end of file