Author: travis
Date: Thu Aug 30 18:53:52 2012
New Revision: 1379061
URL: http://svn.apache.org/viewvc?rev=1379061&view=rev
Log:
HCAT-484 HCatalog should use ql.metadata Table and Partition classes (traviscrawford)
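For context, the diff below swaps the Thrift-generated org.apache.hadoop.hive.metastore.api.Table for the richer org.apache.hadoop.hive.ql.metadata.Table wrapper at most call sites, and routes table lookups through the new HCatUtil.getTable helper. The sketch below is not part of the commit; the metastore connection and the "default"/"mytable" names are hypothetical, but it illustrates the before/after usage implied by the diff.

// Sketch only (not part of this commit): illustrates the API shift HCAT-484 makes.
// The metastore connection and the "default"/"mytable" names are hypothetical.
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hcatalog.common.HCatUtil;

public class Hcat484UsageSketch {
  public static void main(String[] args) throws Exception {
    HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());

    // Before HCAT-484, call sites worked with the Thrift metastore Table directly:
    //   org.apache.hadoop.hive.metastore.api.Table t = client.getTable("default", "mytable");
    //   List<FieldSchema> cols = t.getSd().getCols();

    // After HCAT-484, HCatUtil.getTable wraps the Thrift object in ql.metadata.Table,
    // so schema helpers live on the wrapper, and the Thrift object stays reachable
    // via getTTable() where the raw form is still needed (e.g. alter_table calls).
    Table table = HCatUtil.getTable(client, "default", "mytable");
    List<FieldSchema> cols = table.getCols();
    StorageDescriptor sd = table.getTTable().getSd();
    System.out.println(cols.size() + " columns stored at " + sd.getLocation());

    client.close();
  }
}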
Modified:
incubator/hcatalog/trunk/CHANGES.txt
incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java
incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHCatUtil.java
Modified: incubator/hcatalog/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/CHANGES.txt?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/CHANGES.txt (original)
+++ incubator/hcatalog/trunk/CHANGES.txt Thu Aug 30 18:53:52 2012
@@ -38,6 +38,8 @@ Trunk (unreleased changes)
HCAT-427 Document storage-based authorization (lefty via gates)
IMPROVEMENTS
+ HCAT-484 HCatalog should use ql.metadata Table and Partition classes (traviscrawford)
+
HCAT-485 Document that storage-based security ignores GRANT/REVOKE statements (lefty via khorgath)
HCAT-442 Documentation needs update for using HCatalog with pig (lefty via gates)
Modified: incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java (original)
+++ incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java Thu Aug 30 18:53:52 2012
@@ -27,7 +27,7 @@ import java.util.Properties;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.security.Credentials;
Modified: incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java (original)
+++ incubator/hcatalog/trunk/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java Thu Aug 30 18:53:52 2012
@@ -32,7 +32,7 @@ import org.apache.hadoop.hive.conf.HiveC
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatException;
@@ -141,7 +141,7 @@ public class PigHCatUtil {
HiveMetaStoreClient client = null;
try {
client = getHiveMetaClient(hcatServerUri, hcatServerPrincipal, PigHCatUtil.class);
- table = client.getTable(dbName, tableName);
+ table = HCatUtil.getTable(client, dbName, tableName);
} catch (NoSuchObjectException nsoe){
throw new PigException("Table not found : " + nsoe.getMessage(), PIG_EXCEPTION_CODE); // prettier error messages to frontend
} catch (Exception e) {
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java Thu Aug 30 18:53:52 2012
@@ -39,9 +39,11 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -159,15 +161,12 @@ public class HCatUtil {
}
}
- public static HCatSchema extractSchemaFromStorageDescriptor(
- StorageDescriptor sd) throws HCatException {
- if (sd == null) {
- throw new HCatException(
- "Cannot construct partition info from an empty storage
descriptor.");
- }
- HCatSchema schema = new HCatSchema(HCatUtil.getHCatFieldSchemaList(sd
- .getCols()));
- return schema;
+ public static HCatSchema extractSchema(Table table) throws HCatException {
+ return new HCatSchema(HCatUtil.getHCatFieldSchemaList(table.getCols()));
+ }
+
+ public static HCatSchema extractSchema(Partition partition) throws HCatException {
+ return new HCatSchema(HCatUtil.getHCatFieldSchemaList(partition.getCols()));
}
public static List<FieldSchema> getFieldSchemaList(
@@ -183,14 +182,13 @@ public class HCatUtil {
}
}
- public static Table getTable(HiveMetaStoreClient client, String dbName,
- String tableName) throws Exception {
- return client.getTable(dbName, tableName);
+ public static Table getTable(HiveMetaStoreClient client, String dbName, String tableName)
+ throws NoSuchObjectException, TException, MetaException {
+ return new Table(client.getTable(dbName, tableName));
}
public static HCatSchema getTableSchemaWithPtnCols(Table table) throws IOException {
- HCatSchema tableSchema = new HCatSchema(HCatUtil.getHCatFieldSchemaList(
- new org.apache.hadoop.hive.ql.metadata.Table(table).getCols()));
+ HCatSchema tableSchema = new HCatSchema(HCatUtil.getHCatFieldSchemaList(table.getCols()));
if (table.getPartitionKeys().size() != 0) {
@@ -240,7 +238,7 @@ public class HCatUtil {
partitionKeyMap.put(field.getName().toLowerCase(), field);
}
- List<FieldSchema> tableCols = table.getSd().getCols();
+ List<FieldSchema> tableCols = table.getCols();
List<FieldSchema> newFields = new ArrayList<FieldSchema>();
for (int i = 0; i < partitionSchema.getFields().size(); i++) {
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java Thu Aug 30 18:53:52 2012
@@ -32,7 +32,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.mapred.HCatMapRedUtil;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.JobContext;
@@ -187,9 +187,8 @@ class FileOutputCommitterContainer exten
Path src;
OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(jobContext);
if (dynamicPartitioningUsed){
- src = new Path(getPartitionRootLocation(
- jobInfo.getLocation().toString(),jobInfo.getTableInfo().getTable().getPartitionKeysSize()
- ));
+ src = new Path(getPartitionRootLocation(jobInfo.getLocation(),
+ jobInfo.getTableInfo().getTable().getPartitionKeysSize()));
}else{
src = new Path(jobInfo.getLocation());
}
@@ -243,8 +242,8 @@ class FileOutputCommitterContainer exten
OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context);
Configuration conf = context.getConfiguration();
- Table table = jobInfo.getTableInfo().getTable();
- Path tblPath = new Path(table.getSd().getLocation());
+ Table table = new Table(jobInfo.getTableInfo().getTable());
+ Path tblPath = new Path(table.getTTable().getSd().getLocation());
FileSystem fs = tblPath.getFileSystem(conf);
if( table.getPartitionKeys().size() == 0 ) {
@@ -280,7 +279,8 @@ class FileOutputCommitterContainer exten
HiveConf hiveConf = HCatUtil.getHiveConf(conf);
client = HCatUtil.getHiveClient(hiveConf);
- StorerInfo storer = InternalUtil.extractStorerInfo(table.getSd(),table.getParameters());
+ StorerInfo storer =
+ InternalUtil.extractStorerInfo(table.getTTable().getSd(), table.getParameters());
updateTableSchema(client, table, jobInfo.getOutputSchema());
@@ -426,12 +426,10 @@ class FileOutputCommitterContainer exten
Table table, FileSystem fs,
String grpName, FsPermission perms) throws IOException {
- StorageDescriptor tblSD = table.getSd();
-
Partition partition = new Partition();
partition.setDbName(table.getDbName());
partition.setTableName(table.getTableName());
- partition.setSd(new StorageDescriptor(tblSD));
+ partition.setSd(new StorageDescriptor(table.getTTable().getSd()));
List<FieldSchema> fields = new ArrayList<FieldSchema>();
for(HCatFieldSchema fieldSchema : outputSchema.getFields()) {
@@ -500,7 +498,7 @@ class FileOutputCommitterContainer exten
private String getFinalDynamicPartitionDestination(Table table, Map<String,String> partKVs) {
// file:///tmp/hcat_junit_warehouse/employee/_DYN0.7770480401313761/emp_country=IN/emp_state=KA ->
// file:///tmp/hcat_junit_warehouse/employee/emp_country=IN/emp_state=KA
- Path partPath = new Path(table.getSd().getLocation());
+ Path partPath = new Path(table.getTTable().getSd().getLocation());
for(FieldSchema partKey : table.getPartitionKeys()){
partPath = constructPartialPartPath(partPath, partKey.getName().toLowerCase(), partKVs);
}
@@ -541,12 +539,12 @@ class FileOutputCommitterContainer exten
List<FieldSchema> newColumns = HCatUtil.validatePartitionSchema(table,
partitionSchema);
if( newColumns.size() != 0 ) {
- List<FieldSchema> tableColumns = new ArrayList<FieldSchema>(table.getSd().getCols());
+ List<FieldSchema> tableColumns = new ArrayList<FieldSchema>(table.getTTable().getSd().getCols());
tableColumns.addAll(newColumns);
//Update table schema to add the newly added columns
- table.getSd().setCols(tableColumns);
- client.alter_table(table.getDbName(), table.getTableName(), table);
+ table.getTTable().getSd().setCols(tableColumns);
+ client.alter_table(table.getDbName(), table.getTableName(), table.getTTable());
}
}
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java Thu Aug 30 18:53:52 2012
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
@@ -120,7 +120,7 @@ class FileOutputFormatContainer extends
handleDuplicatePublish(context,
jobInfo,
client,
- jobInfo.getTableInfo().getTable());
+ new Table(jobInfo.getTableInfo().getTable()));
} catch (MetaException e) {
throw new IOException(e);
} catch (TException e) {
@@ -190,7 +190,7 @@ class FileOutputFormatContainer extends
table, outputInfo.getPartitionValues());
// non-partitioned table
- Path tablePath = new Path(table.getSd().getLocation());
+ Path tablePath = new Path(table.getTTable().getSd().getLocation());
FileSystem fs =
tablePath.getFileSystem(context.getConfiguration());
if ( fs.exists(tablePath) ) {
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java Thu Aug 30 18:53:52 2012
@@ -25,6 +25,7 @@ import java.util.Map;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputFormat;
@@ -216,7 +217,8 @@ public abstract class HCatBaseOutputForm
}
}
- HCatUtil.validatePartitionSchema(jobInfo.getTableInfo().getTable(), schemaWithoutParts);
+ HCatUtil.validatePartitionSchema(
+ new Table(jobInfo.getTableInfo().getTable()), schemaWithoutParts);
jobInfo.setPosOfPartCols(posOfPartCols);
jobInfo.setPosOfDynPartCols(posOfDynPartCols);
jobInfo.setOutputSchema(schemaWithoutParts);
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java Thu Aug 30 18:53:52 2012
@@ -32,7 +32,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.OutputCommitter;
@@ -73,7 +73,8 @@ public class HCatOutputFormat extends HC
Configuration conf = job.getConfiguration();
HiveConf hiveConf = HCatUtil.getHiveConf(conf);
client = HCatUtil.getHiveClient(hiveConf);
- Table table = client.getTable(outputJobInfo.getDatabaseName(), outputJobInfo.getTableName());
+ Table table = HCatUtil.getTable(client, outputJobInfo.getDatabaseName(),
+ outputJobInfo.getTableName());
List<String> indexList =
client.listIndexNames(outputJobInfo.getDatabaseName(),
outputJobInfo.getTableName(), Short.MAX_VALUE);
@@ -83,7 +84,7 @@ public class HCatOutputFormat extends HC
throw new HCatException(ErrorType.ERROR_NOT_SUPPORTED, "Store
into a table with an automatic index from Pig/Mapreduce is not supported");
}
}
- StorageDescriptor sd = table.getSd();
+ StorageDescriptor sd = table.getTTable().getSd();
if (sd.isCompressed()) {
throw new HCatException(ErrorType.ERROR_NOT_SUPPORTED, "Store into a compressed partition from Pig/Mapreduce is not supported");
@@ -97,7 +98,7 @@ public class HCatOutputFormat extends HC
throw new HCatException(ErrorType.ERROR_NOT_SUPPORTED, "Store into a partition with sorted column definition from Pig/Mapreduce is not supported");
}
- if (table.getPartitionKeysSize() == 0 ){
+ if (table.getTTable().getPartitionKeysSize() == 0 ){
if ((outputJobInfo.getPartitionValues() != null) &&
(!outputJobInfo.getPartitionValues().isEmpty())){
// attempt made to save partition values in non-partitioned table - throw error.
throw new HCatException(ErrorType.ERROR_INVALID_PARTITION_VALUES,
@@ -117,7 +118,7 @@ public class HCatOutputFormat extends HC
}
if ((outputJobInfo.getPartitionValues() == null)
- || (outputJobInfo.getPartitionValues().size() < table.getPartitionKeysSize())){
+ || (outputJobInfo.getPartitionValues().size() < table.getTTable().getPartitionKeysSize())){
// dynamic partition usecase - partition values were null, or not all were specified
// need to figure out which keys are not specified.
List<String> dynamicPartitioningKeys = new ArrayList<String>();
@@ -128,7 +129,7 @@ public class HCatOutputFormat extends HC
}
}
- if (valueMap.size() + dynamicPartitioningKeys.size() != table.getPartitionKeysSize()){
+ if (valueMap.size() + dynamicPartitioningKeys.size() != table.getTTable().getPartitionKeysSize()){
// If this isn't equal, then bogus key values have been inserted, error out.
throw new HCatException(ErrorType.ERROR_INVALID_PARTITION_VALUES,"Invalid partition keys specified");
}
@@ -148,9 +149,9 @@ public class HCatOutputFormat extends HC
outputJobInfo.setPartitionValues(valueMap);
}
- StorageDescriptor tblSD = table.getSd();
- HCatSchema tableSchema = HCatUtil.extractSchemaFromStorageDescriptor(tblSD);
- StorerInfo storerInfo = InternalUtil.extractStorerInfo(tblSD,table.getParameters());
+ HCatSchema tableSchema = HCatUtil.extractSchema(table);
+ StorerInfo storerInfo =
+ InternalUtil.extractStorerInfo(table.getTTable().getSd(), table.getParameters());
List<String> partitionCols = new ArrayList<String>();
for(FieldSchema schema : table.getPartitionKeys()) {
@@ -160,7 +161,7 @@ public class HCatOutputFormat extends HC
HCatStorageHandler storageHandler =
HCatUtil.getStorageHandler(job.getConfiguration(), storerInfo);
//Serialize the output info into the configuration
- outputJobInfo.setTableInfo(HCatTableInfo.valueOf(table));
+ outputJobInfo.setTableInfo(HCatTableInfo.valueOf(table.getTTable()));
outputJobInfo.setOutputSchema(tableSchema);
harRequested = getHarRequested(hiveConf);
outputJobInfo.setHarRequested(harRequested);
@@ -169,7 +170,7 @@ public class HCatOutputFormat extends HC
HCatUtil.configureOutputStorageHandler(storageHandler,job,outputJobInfo);
- Path tblPath = new Path(table.getSd().getLocation());
+ Path tblPath = new Path(table.getTTable().getSd().getLocation());
/* Set the umask in conf such that files/dirs get created with table-dir
* permissions. Following three assumptions are made:
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java Thu Aug 30 18:53:52 2012
@@ -137,17 +137,17 @@ public class HCatTableInfo implements Se
* @throws IOException
*/
static HCatTableInfo valueOf(Table table) throws IOException {
- HCatSchema dataColumns =
- HCatUtil.extractSchemaFromStorageDescriptor(table.getSd());
- StorerInfo storerInfo =
+ // Explicitly use {@link org.apache.hadoop.hive.ql.metadata.Table} when getting the schema,
+ // but store @{link org.apache.hadoop.hive.metastore.api.Table} as this class is serialized
+ // into the job conf.
+ org.apache.hadoop.hive.ql.metadata.Table mTable =
+ new org.apache.hadoop.hive.ql.metadata.Table(table);
+ HCatSchema schema = HCatUtil.extractSchema(mTable);
+ StorerInfo storerInfo =
InternalUtil.extractStorerInfo(table.getSd(), table.getParameters());
- HCatSchema partitionColumns = HCatUtil.getPartitionColumns(table);
- return new HCatTableInfo(table.getDbName(),
- table.getTableName(),
- dataColumns,
- partitionColumns,
- storerInfo,
- table);
+ HCatSchema partitionColumns = HCatUtil.getPartitionColumns(mTable);
+ return new HCatTableInfo(table.getDbName(), table.getTableName(), schema,
+ partitionColumns, storerInfo, table);
}
@Override
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java Thu Aug 30 18:53:52 2012
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hcatalog.common.ErrorType;
import org.apache.hcatalog.common.HCatConstants;
@@ -94,12 +94,12 @@ public class InitializeInput {
hiveConf = new HiveConf(HCatInputFormat.class);
}
client = HCatUtil.getHiveClient(hiveConf);
- Table table = client.getTable(inputJobInfo.getDatabaseName(),
- inputJobInfo.getTableName());
+ Table table = HCatUtil.getTable(client, inputJobInfo.getDatabaseName(),
+ inputJobInfo.getTableName());
List<PartInfo> partInfoList = new ArrayList<PartInfo>();
- inputJobInfo.setTableInfo(HCatTableInfo.valueOf(table));
+ inputJobInfo.setTableInfo(HCatTableInfo.valueOf(table.getTTable()));
if( table.getPartitionKeys().size() != 0 ) {
//Partitioned table
List<Partition> parts = client.listPartitionsByFilter(inputJobInfo.getDatabaseName(),
@@ -115,18 +115,19 @@ public class InitializeInput {
// populate partition info
for (Partition ptn : parts){
- PartInfo partInfo = extractPartInfo(ptn.getSd(),ptn.getParameters(),
- job.getConfiguration(),
- inputJobInfo);
+ HCatSchema schema = HCatUtil.extractSchema(
+ new org.apache.hadoop.hive.ql.metadata.Partition(table, ptn));
+ PartInfo partInfo = extractPartInfo(schema, ptn.getSd(),
+ ptn.getParameters(), job.getConfiguration(), inputJobInfo);
partInfo.setPartitionValues(createPtnKeyValueMap(table, ptn));
partInfoList.add(partInfo);
}
}else{
//Non partitioned table
- PartInfo partInfo = extractPartInfo(table.getSd(),table.getParameters(),
- job.getConfiguration(),
- inputJobInfo);
+ HCatSchema schema = HCatUtil.extractSchema(table);
+ PartInfo partInfo = extractPartInfo(schema, table.getTTable().getSd(),
+ table.getParameters(), job.getConfiguration(), inputJobInfo);
partInfo.setPartitionValues(new HashMap<String,String>());
partInfoList.add(partInfo);
}
@@ -160,29 +161,25 @@ public class InitializeInput {
return ptnKeyValues;
}
- static PartInfo extractPartInfo(StorageDescriptor sd,
+ private static PartInfo extractPartInfo(HCatSchema schema, StorageDescriptor sd,
Map<String,String> parameters, Configuration conf,
InputJobInfo inputJobInfo) throws IOException{
- HCatSchema schema = HCatUtil.extractSchemaFromStorageDescriptor(sd);
+
StorerInfo storerInfo = InternalUtil.extractStorerInfo(sd,parameters);
Properties hcatProperties = new Properties();
- HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf,
- storerInfo);
+ HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
// copy the properties from storageHandler to jobProperties
- Map<String, String>jobProperties = HCatUtil.getInputJobProperties(
- storageHandler,
- inputJobInfo);
+ Map<String, String>jobProperties = HCatUtil.getInputJobProperties(storageHandler, inputJobInfo);
for (String key : parameters.keySet()){
hcatProperties.put(key, parameters.get(key));
}
// FIXME
// Bloating partinfo with inputJobInfo is not good
- return new PartInfo(schema, storageHandler,
- sd.getLocation(), hcatProperties,
- jobProperties, inputJobInfo.getTableInfo());
+ return new PartInfo(schema, storageHandler, sd.getLocation(),
+ hcatProperties, jobProperties, inputJobInfo.getTableInfo());
}
}
Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHCatUtil.java?rev=1379061&r1=1379060&r2=1379061&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHCatUtil.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHCatUtil.java Thu Aug 30 18:53:52 2012
@@ -32,7 +32,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchema;
@@ -120,9 +120,11 @@ public class TestHCatUtil {
"location", "org.apache.hadoop.mapred.TextInputFormat",
"org.apache.hadoop.mapred.TextOutputFormat", false, -1, new
SerDeInfo(),
new ArrayList<String>(), new ArrayList<Order>(), new HashMap<String,
String>());
- Table table = new Table("test_tblname", "test_dbname", "test_owner", 0, 0, 0,
- sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
- "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
+ org.apache.hadoop.hive.metastore.api.Table apiTable =
+ new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
+ 0, 0, 0, sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
+ "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
+ Table table = new Table(apiTable);
List<HCatFieldSchema> expectedHCatSchema =
Lists.newArrayList(new HCatFieldSchema("username", HCatFieldSchema.Type.STRING, null));
@@ -133,7 +135,7 @@ public class TestHCatUtil {
// Add a partition key & ensure its reflected in the schema.
List<FieldSchema> partitionKeys =
Lists.newArrayList(new FieldSchema("dt", Constants.STRING_TYPE_NAME, null));
- table.setPartitionKeys(partitionKeys);
+ table.getTTable().setPartitionKeys(partitionKeys);
expectedHCatSchema.add(new HCatFieldSchema("dt", HCatFieldSchema.Type.STRING, null));
Assert.assertEquals(new HCatSchema(expectedHCatSchema),
HCatUtil.getTableSchemaWithPtnCols(table));
@@ -163,9 +165,11 @@ public class TestHCatUtil {
false, -1, serDeInfo, new ArrayList<String>(), new ArrayList<Order>(),
new HashMap<String, String>());
- Table table = new Table("test_tblname", "test_dbname", "test_owner", 0, 0, 0,
- sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
- "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
+ org.apache.hadoop.hive.metastore.api.Table apiTable =
+ new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
+ 0, 0, 0, sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
+ "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
+ Table table = new Table(apiTable);
List<HCatFieldSchema> expectedHCatSchema = Lists.newArrayList(
new HCatFieldSchema("myint", HCatFieldSchema.Type.INT, null),