Author: toffer
Date: Fri Mar 9 22:35:25 2012
New Revision: 1299080
URL: http://svn.apache.org/viewvc?rev=1299080&view=rev
Log:
HCATALOG-303 Fix build failure due to HBaseSerDe.parseColumnMapping method
signature change in hive (rohini via toffer)
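
For context, a minimal call-site sketch of what this patch changes (illustrative only, not part of the commit: the class name below is hypothetical, and it would have to live in the org.apache.hcatalog.hbase package because HBaseUtil is package-private). Callers previously went through HBaseSerDe.parseColumnMapping and caught SerDeException; after this patch they call the new local HBaseUtil.parseColumnMapping, which reports errors via IllegalArgumentException/IOException instead:

    package org.apache.hcatalog.hbase;

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical caller showing the post-patch call site.
    class ParseColumnMappingCallSite {
        static int keyColumnIndex(String hbaseColumnsMapping) throws IOException {
            List<String> families = new ArrayList<String>();
            List<byte[]> familiesBytes = new ArrayList<byte[]>();
            List<String> qualifiers = new ArrayList<String>();
            // Passing null for the qualifier byte[] list skips that caching,
            // as HBaseHCatStorageHandler now does in the diff below.
            return HBaseUtil.parseColumnMapping(hbaseColumnsMapping,
                    families, familiesBytes, qualifiers, null);
        }
    }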
Added:
incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java
Removed:
incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseMapredUtil.java
Modified:
incubator/hcatalog/trunk/CHANGES.txt
incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java
incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java
incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java
incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java
Modified: incubator/hcatalog/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/CHANGES.txt?rev=1299080&r1=1299079&r2=1299080&view=diff
==============================================================================
--- incubator/hcatalog/trunk/CHANGES.txt (original)
+++ incubator/hcatalog/trunk/CHANGES.txt Fri Mar 9 22:35:25 2012
@@ -79,6 +79,8 @@ Release 0.4.0 - Unreleased
OPTIMIZATIONS
BUG FIXES
+  HCAT-303 Fix build failure due to HBaseSerDe.parseColumnMapping method signature change in hive (rohini via toffer)
+
HCAT-282 HCatInputFormat shouldn't expect storageHandler to be serializable
(khorgath via gates)
HCAT-281 HCat use table schema instead of partition schema to read partition
data (daijy and khorgath via gates)
Modified: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java?rev=1299080&r1=1299079&r2=1299080&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java Fri Mar 9 22:35:25 2012
@@ -65,7 +65,7 @@ class HBaseBulkOutputFormat extends HBas
job.setOutputValueClass(Put.class);
job.setOutputCommitter(HBaseBulkOutputCommitter.class);
baseOutputFormat.checkOutputSpecs(ignored, job);
- HBaseMapredUtil.addHBaseDelegationToken(job);
+ HBaseUtil.addHBaseDelegationToken(job);
addJTDelegationToken(job);
}
Modified: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java?rev=1299080&r1=1299079&r2=1299080&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java Fri Mar 9 22:35:25 2012
@@ -67,7 +67,7 @@ class HBaseDirectOutputFormat extends HB
job.setIfUnset(TableOutputFormat.OUTPUT_TABLE,
job.get(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY));
outputFormat.checkOutputSpecs(ignored, job);
- HBaseMapredUtil.addHBaseDelegationToken(job);
+ HBaseUtil.addHBaseDelegationToken(job);
}
private static class HBaseDirectRecordWriter implements
Modified: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java?rev=1299080&r1=1299079&r2=1299080&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (original)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java Fri Mar 9 22:35:25 2012
@@ -244,10 +244,9 @@ public class HBaseHCatStorageHandler ext
List<String> hbaseColumnFamilies = new ArrayList<String>();
List<String> hbaseColumnQualifiers = new ArrayList<String>();
List<byte[]> hbaseColumnFamiliesBytes = new ArrayList<byte[]>();
- List<byte[]> hbaseColumnQualifiersBytes = new ArrayList<byte[]>();
- int iKey = HBaseSerDe.parseColumnMapping(hbaseColumnsMapping,
+ int iKey = HBaseUtil.parseColumnMapping(hbaseColumnsMapping,
hbaseColumnFamilies, hbaseColumnFamiliesBytes,
- hbaseColumnQualifiers, hbaseColumnQualifiersBytes);
+ hbaseColumnQualifiers, null);
HTableDescriptor tableDesc;
Set<String> uniqueColumnFamilies = new HashSet<String>();
@@ -313,8 +312,8 @@ public class HBaseHCatStorageHandler ext
throw new MetaException(StringUtils.stringifyException(mnre));
} catch (IOException ie) {
throw new MetaException(StringUtils.stringifyException(ie));
- } catch (SerDeException se) {
- throw new MetaException(StringUtils.stringifyException(se));
+ } catch (IllegalArgumentException iae) {
+ throw new MetaException(StringUtils.stringifyException(iae));
}
}
@@ -556,27 +555,23 @@ public class HBaseHCatStorageHandler ext
int position = tableSchema.getPosition(fieldName);
outputColumnMapping.add(position);
}
- try {
- List<String> columnFamilies = new ArrayList<String>();
- List<String> columnQualifiers = new ArrayList<String>();
- HBaseSerDe.parseColumnMapping(hbaseColumnMapping, columnFamilies, null,
- columnQualifiers, null);
- for (int i = 0; i < outputColumnMapping.size(); i++) {
- int cfIndex = outputColumnMapping.get(i);
- String cf = columnFamilies.get(cfIndex);
- // We skip the key column.
- if (cf.equals(HBaseSerDe.HBASE_KEY_COL) == false) {
- String qualifier = columnQualifiers.get(i);
- builder.append(cf);
- builder.append(":");
- if (qualifier != null) {
- builder.append(qualifier);
- }
- builder.append(" ");
+ List<String> columnFamilies = new ArrayList<String>();
+ List<String> columnQualifiers = new ArrayList<String>();
+ HBaseUtil.parseColumnMapping(hbaseColumnMapping, columnFamilies, null,
+ columnQualifiers, null);
+ for (int i = 0; i < outputColumnMapping.size(); i++) {
+ int cfIndex = outputColumnMapping.get(i);
+ String cf = columnFamilies.get(cfIndex);
+ // We skip the key column.
+ if (cf.equals(HBaseSerDe.HBASE_KEY_COL) == false) {
+ String qualifier = columnQualifiers.get(i);
+ builder.append(cf);
+ builder.append(":");
+ if (qualifier != null) {
+ builder.append(qualifier);
}
+ builder.append(" ");
}
- } catch (SerDeException e) {
- throw new IOException(e);
}
}
//Remove the extra space delimiter
Modified: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java?rev=1299080&r1=1299079&r2=1299080&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java Fri Mar 9 22:35:25 2012
@@ -105,7 +105,7 @@ class HBaseInputFormat implements InputF
public org.apache.hadoop.mapred.InputSplit[] getSplits(JobConf job, int numSplits)
throws IOException {
inputFormat.setConf(job);
- HBaseMapredUtil.addHBaseDelegationToken(job);
+ HBaseUtil.addHBaseDelegationToken(job);
return convertSplits(inputFormat.getSplits(HCatMapRedUtil.createJobContext(job, null,
Reporter.NULL)));
}
Modified: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java?rev=1299080&r1=1299079&r2=1299080&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java (original)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java Fri Mar 9 22:35:25 2012
@@ -32,7 +32,6 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hive.hbase.HBaseSerDe;
-import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
@@ -273,12 +272,8 @@ class HBaseRevisionManagerUtil {
Map<String, String> hcatHbaseColMap = new HashMap<String, String>();
List<String> columnFamilies = new ArrayList<String>();
List<String> columnQualifiers = new ArrayList<String>();
- try {
- HBaseSerDe.parseColumnMapping(hbaseColumnMapping, columnFamilies,
- null, columnQualifiers, null);
- } catch (SerDeException e) {
- throw new IOException("Exception while converting snapshots.", e);
- }
+ HBaseUtil.parseColumnMapping(hbaseColumnMapping, columnFamilies,
+ null, columnQualifiers, null);
for (HCatFieldSchema column : hcatTableSchema.getFields()) {
int fieldPos = hcatTableSchema.getPosition(column.getName());
Added: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java?rev=1299080&view=auto
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java (added)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java Fri Mar 9 22:35:25 2012
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.hbase;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hive.hbase.HBaseSerDe;
+import org.apache.hadoop.mapred.JobConf;
+
+class HBaseUtil {
+
+ private HBaseUtil(){
+ }
+
+ /**
+ * Parses the HBase columns mapping to identify the column families, qualifiers
+ * and also caches the byte arrays corresponding to them. One of the HCat table
+ * columns maps to the HBase row key, by default the first column.
+ *
+ * @param columnMapping - the column mapping specification to be parsed
+ * @param colFamilies - the list of HBase column family names
+ * @param colFamiliesBytes - the corresponding byte array
+ * @param colQualifiers - the list of HBase column qualifier names
+ * @param colQualifiersBytes - the corresponding byte array
+ * @return the row key index in the column names list
+ * @throws IOException
+ */
+ static int parseColumnMapping(
+ String columnMapping,
+ List<String> colFamilies,
+ List<byte []> colFamiliesBytes,
+ List<String> colQualifiers,
+ List<byte []> colQualifiersBytes) throws IOException {
+
+ int rowKeyIndex = -1;
+
+ if (colFamilies == null || colQualifiers == null) {
+ throw new IllegalArgumentException("Error: caller must pass in lists
for the column families " +
+ "and qualifiers.");
+ }
+
+ colFamilies.clear();
+ colQualifiers.clear();
+
+ if (columnMapping == null) {
+ throw new IllegalArgumentException("Error: hbase.columns.mapping
missing for this HBase table.");
+ }
+
+ if (columnMapping.equals("") || columnMapping.equals(HBaseSerDe.HBASE_KEY_COL)) {
+ throw new IllegalArgumentException("Error: hbase.columns.mapping specifies only the HBase table"
+     + " row key. A valid Hive-HBase table must specify at least one additional column.");
+ }
+
+ String [] mapping = columnMapping.split(",");
+
+ for (int i = 0; i < mapping.length; i++) {
+ String elem = mapping[i];
+ int idxFirst = elem.indexOf(":");
+ int idxLast = elem.lastIndexOf(":");
+
+ if (idxFirst < 0 || !(idxFirst == idxLast)) {
+ throw new IllegalArgumentException("Error: the HBase columns mapping
contains a badly formed " +
+ "column family, column qualifier specification.");
+ }
+
+ if (elem.equals(HBaseSerDe.HBASE_KEY_COL)) {
+ rowKeyIndex = i;
+ colFamilies.add(elem);
+ colQualifiers.add(null);
+ } else {
+ String [] parts = elem.split(":");
+ assert(parts.length > 0 && parts.length <= 2);
+ colFamilies.add(parts[0]);
+
+ if (parts.length == 2) {
+ colQualifiers.add(parts[1]);
+ } else {
+ colQualifiers.add(null);
+ }
+ }
+ }
+
+ if (rowKeyIndex == -1) {
+ colFamilies.add(0, HBaseSerDe.HBASE_KEY_COL);
+ colQualifiers.add(0, null);
+ rowKeyIndex = 0;
+ }
+
+ if (colFamilies.size() != colQualifiers.size()) {
+ throw new IOException("Error in parsing the hbase columns mapping.");
+ }
+
+ // populate the corresponding byte [] if the client has passed in a non-null list
+ if (colFamiliesBytes != null) {
+ colFamiliesBytes.clear();
+
+ for (String fam : colFamilies) {
+ colFamiliesBytes.add(Bytes.toBytes(fam));
+ }
+ }
+
+ if (colQualifiersBytes != null) {
+ colQualifiersBytes.clear();
+
+ for (String qual : colQualifiers) {
+ if (qual == null) {
+ colQualifiersBytes.add(null);
+ } else {
+ colQualifiersBytes.add(Bytes.toBytes(qual));
+ }
+ }
+ }
+
+ if (colFamiliesBytes != null && colQualifiersBytes != null) {
+ if (colFamiliesBytes.size() != colQualifiersBytes.size()) {
+ throw new IOException("Error in caching the bytes for the hbase
column families " +
+ "and qualifiers.");
+ }
+ }
+
+ return rowKeyIndex;
+ }
+
+ /**
+ * Get delegation token from hbase and add it to JobConf
+ * @param job
+ * @throws IOException
+ */
+ static void addHBaseDelegationToken(JobConf job) throws IOException {
+ if (User.isHBaseSecurityEnabled(job)) {
+ try {
+ User.getCurrent().obtainAuthTokenForJob(job);
+ } catch (InterruptedException e) {
+ throw new IOException("Error while obtaining hbase delegation
token", e);
+ }
+ }
+ }
+
+}
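
A minimal usage sketch of the new helper (not part of the commit): the example class below is hypothetical, must sit in org.apache.hcatalog.hbase because HBaseUtil is package-private, and assumes HBaseSerDe.HBASE_KEY_COL is ":key" as in Hive's HBaseSerDe. It shows what parseColumnMapping populates and returns for a typical mapping string:

    package org.apache.hcatalog.hbase;

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical example class demonstrating HBaseUtil.parseColumnMapping.
    public class HBaseUtilExample {
        public static void main(String[] args) throws IOException {
            List<String> families = new ArrayList<String>();
            List<byte[]> familiesBytes = new ArrayList<byte[]>();
            List<String> qualifiers = new ArrayList<String>();
            List<byte[]> qualifiersBytes = new ArrayList<byte[]>();

            // ":key" maps the first column to the HBase row key; the remaining
            // entries are family:qualifier pairs.
            int rowKeyIndex = HBaseUtil.parseColumnMapping(":key,cf1:q1,cf2:q2",
                    families, familiesBytes, qualifiers, qualifiersBytes);

            System.out.println(rowKeyIndex);  // 0
            System.out.println(families);     // [:key, cf1, cf2]
            System.out.println(qualifiers);   // [null, q1, q2]
        }
    }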