Repository: incubator-impala
Updated Branches:
  refs/heads/hadoop-next ee2a06d82 -> cc603eb15


IMPALA-4277: bump Hadoop component versions except for Hadoop itself

The Hive version bump required some minor changes to deal with renames
and added function arguments. The other components required no changes
to build the frontend against their API (I haven't been able to test
the JNI backend stuff).

IMPALA-4172 is blocking the hadoop version bump.

This builds if I manually put hive_metastore.thrift in the right place
and set SKIP_TOOLCHAIN_BOOTSTRAP=false. Tests can't run since we don't
have the special tarballs available that are used for the test cluster.

I manually constructed the Hive build dependency by taking
hive_metastore.thrift and putting it at
$CDH_COMPONENTS_HOME/hive-2.1.0-cdh6.x-SNAPSHOT/src/metastore/if/hive_metastore.thrift

The Hive API changes were:

* org.apache.hive.service.cli moved to org.apache.hive.service.rpc
* MetaStoreUtils.validateName() and
  MetaStoreUtils.updatePartitionStatsFast() have additional context
  arguments.
* HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_TABLE_PARTITION_MAX
  was renamed to METASTORE_BATCH_RETRIEVE_OBJECTS_MAX

Change-Id: Ia0be6bbe76d929ceaeb2fa9ac4ebba1820c4dab7
Reviewed-on: http://gerrit.cloudera.org:8080/4698
Reviewed-by: Tim Armstrong <tarmstr...@cloudera.com>
Tested-by: Tim Armstrong <tarmstr...@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-impala/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-impala/commit/51c6fde3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-impala/tree/51c6fde3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-impala/diff/51c6fde3

Branch: refs/heads/hadoop-next
Commit: 51c6fde330d23ab3b1ce47de76140dbf23bd76d9
Parents: ee2a06d
Author: Tim Armstrong <tarmstr...@cloudera.com>
Authored: Tue Oct 11 11:00:17 2016 -0700
Committer: Tim Armstrong <tarmstr...@cloudera.com>
Committed: Tue Oct 18 16:47:56 2016 +0000

----------------------------------------------------------------------
 bin/impala-config.sh                              |  8 ++++----
 common/thrift/TCLIService.thrift                  |  2 +-
 common/thrift/cli_service.thrift                  |  2 +-
 .../org/apache/impala/analysis/ColumnDef.java     |  2 +-
 .../apache/impala/analysis/CreateDataSrcStmt.java |  2 +-
 .../org/apache/impala/analysis/CreateDbStmt.java  |  2 +-
 .../apache/impala/analysis/DropDataSrcStmt.java   |  2 +-
 .../java/org/apache/impala/analysis/Subquery.java |  2 +-
 .../org/apache/impala/analysis/TableName.java     |  4 ++--
 .../java/org/apache/impala/analysis/TypeDef.java  |  2 +-
 .../apache/impala/service/CatalogOpExecutor.java  | 18 ++++++++----------
 .../java/org/apache/impala/service/Frontend.java  |  8 ++++----
 .../org/apache/impala/util/MetaStoreUtil.java     |  2 +-
 .../apache/impala/analysis/AuthorizationTest.java |  6 +++---
 .../org/apache/impala/analysis/ParserTest.java    |  2 +-
 .../org/apache/impala/service/FrontendTest.java   | 12 ++++++------
 16 files changed, 37 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/bin/impala-config.sh
----------------------------------------------------------------------
diff --git a/bin/impala-config.sh b/bin/impala-config.sh
index 38bdc69..6425d90 100755
--- a/bin/impala-config.sh
+++ b/bin/impala-config.sh
@@ -296,10 +296,10 @@ if [[ $OSTYPE == "darwin"* ]]; then
 fi
 
 export IMPALA_HADOOP_VERSION=${IMPALA_HADOOP_VERSION:-2.6.0-cdh5.10.0-SNAPSHOT}
-export IMPALA_HBASE_VERSION=${IMPALA_HBASE_VERSION:-1.2.0-cdh5.10.0-SNAPSHOT}
-export IMPALA_HIVE_VERSION=${IMPALA_HIVE_VERSION:-1.1.0-cdh5.10.0-SNAPSHOT}
-export IMPALA_SENTRY_VERSION=${IMPALA_SENTRY_VERSION:-1.5.1-cdh5.10.0-SNAPSHOT}
-export IMPALA_PARQUET_VERSION=${IMPALA_PARQUET_VERSION:-1.5.0-cdh5.10.0-SNAPSHOT}
+export IMPALA_HBASE_VERSION=${IMPALA_HBASE_VERSION:-2.0.0-cdh6.x-SNAPSHOT}
+export IMPALA_HIVE_VERSION=${IMPALA_HIVE_VERSION:-2.1.0-cdh6.x-SNAPSHOT}
+export IMPALA_SENTRY_VERSION=${IMPALA_SENTRY_VERSION:-1.5.1-cdh6.x-SNAPSHOT}
+export IMPALA_PARQUET_VERSION=${IMPALA_PARQUET_VERSION:-1.5.0-cdh6.x-SNAPSHOT}
 export IMPALA_LLAMA_MINIKDC_VERSION=${IMPALA_LLAMA_MINIKDC_VERSION:-1.0.0}
 
 export IMPALA_FE_DIR="$IMPALA_HOME/fe"

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/common/thrift/TCLIService.thrift
----------------------------------------------------------------------
diff --git a/common/thrift/TCLIService.thrift b/common/thrift/TCLIService.thrift
index f95e2f8..afed40e 100644
--- a/common/thrift/TCLIService.thrift
+++ b/common/thrift/TCLIService.thrift
@@ -32,7 +32,7 @@
 // * Service names begin with the letter "T", use a capital letter for each
 //   new word (with no underscores), and end with the word "Service".
 
-namespace java org.apache.hive.service.cli.thrift
+namespace java org.apache.hive.service.rpc.thrift
 namespace cpp apache.hive.service.cli.thrift
 
 // List of protocol versions. A new token should be

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/common/thrift/cli_service.thrift
----------------------------------------------------------------------
diff --git a/common/thrift/cli_service.thrift b/common/thrift/cli_service.thrift
index 1fb033d..d4da795 100644
--- a/common/thrift/cli_service.thrift
+++ b/common/thrift/cli_service.thrift
@@ -36,7 +36,7 @@
 // * Service names begin with the letter "T", use a capital letter for each
 //   new word (with no underscores), and end with the word "Service".
 
-namespace java org.apache.hive.service.cli.thrift
+namespace java org.apache.hive.service.rpc.thrift
 namespace cpp apache.hive.service.cli.thrift
 
 // List of protocol versions. A new token should be

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java 
b/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
index 6b2a1d2..c65671a 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
@@ -79,7 +79,7 @@ public class ColumnDef {
 
   public void analyze() throws AnalysisException {
     // Check whether the column name meets the Metastore's requirements.
-    if (!MetaStoreUtils.validateName(colName_)) {
+    if (!MetaStoreUtils.validateName(colName_, null)) {
       throw new AnalysisException("Invalid column/field name: " + colName_);
     }
     if (typeDef_ != null) {

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/analysis/CreateDataSrcStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateDataSrcStmt.java 
b/fe/src/main/java/org/apache/impala/analysis/CreateDataSrcStmt.java
index 30ca223..cc788f5 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateDataSrcStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateDataSrcStmt.java
@@ -54,7 +54,7 @@ public class CreateDataSrcStmt extends StatementBase {
 
   @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
-    if (!MetaStoreUtils.validateName(dataSrcName_)) {
+    if (!MetaStoreUtils.validateName(dataSrcName_, null)) {
       throw new AnalysisException("Invalid data source name: " + dataSrcName_);
     }
     if (!ifNotExists_ && analyzer.getCatalog().getDataSource(dataSrcName_) != 
null) {

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java 
b/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java
index a9fbf20..f603585 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java
@@ -82,7 +82,7 @@ public class CreateDbStmt extends StatementBase {
   @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     // Check whether the db name meets the Metastore's requirements.
-    if (!MetaStoreUtils.validateName(dbName_)) {
+    if (!MetaStoreUtils.validateName(dbName_, null)) {
       throw new AnalysisException("Invalid database name: " + dbName_);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/analysis/DropDataSrcStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/DropDataSrcStmt.java 
b/fe/src/main/java/org/apache/impala/analysis/DropDataSrcStmt.java
index e3dd9a8..90596db 100644
--- a/fe/src/main/java/org/apache/impala/analysis/DropDataSrcStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/DropDataSrcStmt.java
@@ -39,7 +39,7 @@ public class DropDataSrcStmt extends StatementBase {
 
   @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
-    if (!MetaStoreUtils.validateName(dataSrcName_) ||
+    if (!MetaStoreUtils.validateName(dataSrcName_, null) ||
         (!ifExists_ && analyzer.getCatalog().getDataSource(dataSrcName_) == 
null)) {
       throw new AnalysisException(Analyzer.DATA_SRC_DOES_NOT_EXIST_ERROR_MSG +
           dataSrcName_);

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/analysis/Subquery.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/Subquery.java 
b/fe/src/main/java/org/apache/impala/analysis/Subquery.java
index f3dcb7c..4f09bb2 100644
--- a/fe/src/main/java/org/apache/impala/analysis/Subquery.java
+++ b/fe/src/main/java/org/apache/impala/analysis/Subquery.java
@@ -132,7 +132,7 @@ public class Subquery extends Expr {
       Expr expr = stmtResultExprs.get(i);
       String fieldName = null;
       // Check if the label meets the Metastore's requirements.
-      if (MetaStoreUtils.validateName(labels.get(i))) {
+      if (MetaStoreUtils.validateName(labels.get(i), null)) {
         fieldName = labels.get(i);
         // Make sure the field names are unique.
         if (!hasUniqueLabels) {

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/analysis/TableName.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/TableName.java 
b/fe/src/main/java/org/apache/impala/analysis/TableName.java
index 297948e..3e0cc1a 100644
--- a/fe/src/main/java/org/apache/impala/analysis/TableName.java
+++ b/fe/src/main/java/org/apache/impala/analysis/TableName.java
@@ -54,12 +54,12 @@ public class TableName {
    */
   public void analyze() throws AnalysisException {
     if (db_ != null) {
-      if (!MetaStoreUtils.validateName(db_)) {
+      if (!MetaStoreUtils.validateName(db_, null)) {
         throw new AnalysisException("Invalid database name: " + db_);
       }
     }
     Preconditions.checkNotNull(tbl_);
-    if (!MetaStoreUtils.validateName(tbl_)) {
+    if (!MetaStoreUtils.validateName(tbl_, null)) {
       throw new AnalysisException("Invalid table/view name: " + tbl_);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/analysis/TypeDef.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/TypeDef.java 
b/fe/src/main/java/org/apache/impala/analysis/TypeDef.java
index 76e6a8f..62d6539 100644
--- a/fe/src/main/java/org/apache/impala/analysis/TypeDef.java
+++ b/fe/src/main/java/org/apache/impala/analysis/TypeDef.java
@@ -133,7 +133,7 @@ public class TypeDef implements ParseNode {
                 f.getName(), toSql()));
       }
       // Check whether the column name meets the Metastore's requirements.
-      if (!MetaStoreUtils.validateName(f.getName().toLowerCase())) {
+      if (!MetaStoreUtils.validateName(f.getName().toLowerCase(), null)) {
         throw new AnalysisException("Invalid struct field name: " + 
f.getName());
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java 
b/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java
index 780bf13..cf2682e 100644
--- a/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java
+++ b/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java
@@ -677,8 +677,7 @@ public class CatalogOpExecutor {
           partition.getValuesAsString(), numRows));
       PartitionStatsUtil.partStatsToParameters(partitionStats, partition);
       partition.putToParameters(StatsSetupConst.ROW_COUNT, 
String.valueOf(numRows));
-      partition.putToParameters(StatsSetupConst.STATS_GENERATED_VIA_STATS_TASK,
-          StatsSetupConst.TRUE);
+      partition.putToParameters(StatsSetupConst.STATS_GENERATED, StatsSetupConst.TASK);
       ++numTargetedPartitions;
       modifiedParts.add(partition);
     }
@@ -696,8 +695,7 @@ public class CatalogOpExecutor {
     // Update the table's ROW_COUNT parameter.
     msTbl.putToParameters(StatsSetupConst.ROW_COUNT,
         String.valueOf(params.getTable_stats().num_rows));
-    msTbl.putToParameters(StatsSetupConst.STATS_GENERATED_VIA_STATS_TASK,
-        StatsSetupConst.TRUE);
+    msTbl.putToParameters(StatsSetupConst.STATS_GENERATED, StatsSetupConst.TASK);
     return numTargetedPartitions;
   }
 
@@ -1687,7 +1685,7 @@ public class CatalogOpExecutor {
         cacheIds = Lists.<Long>newArrayList(id);
         // Update the partition metadata to include the cache directive id.
         msClient.getHiveClient().alter_partition(partition.getDbName(),
-            partition.getTableName(), partition);
+            partition.getTableName(), partition, null);
       }
       updateLastDdlTime(msTbl, msClient);
     } catch (AlreadyExistsException e) {
@@ -2212,7 +2210,7 @@ public class CatalogOpExecutor {
         }
         // Update the partition metadata to include the cache directive id.
         msClient.getHiveClient().alter_partitions(tableName.getDb(),
-            tableName.getTbl(), hmsPartitions);
+            tableName.getTbl(), hmsPartitions, null);
       }
       updateLastDdlTime(msTbl, msClient);
     } catch (AlreadyExistsException e) {
@@ -2355,7 +2353,7 @@ public class CatalogOpExecutor {
     try (MetaStoreClient msClient = catalog_.getMetaStoreClient()) {
       TableName tableName = tbl.getTableName();
       msClient.getHiveClient().alter_partition(
-          tableName.getDb(), tableName.getTbl(), partition.toHmsPartition());
+          tableName.getDb(), tableName.getTbl(), partition.toHmsPartition(), null);
       org.apache.hadoop.hive.metastore.api.Table msTbl =
           tbl.getMetaStoreTable().deepCopy();
       updateLastDdlTime(msTbl, msClient);
@@ -2511,7 +2509,7 @@ public class CatalogOpExecutor {
         try {
           // Alter partitions in bulk.
           msClient.getHiveClient().alter_partitions(dbName, tableName,
-              hmsPartitions.subList(i, numPartitionsToUpdate));
+              hmsPartitions.subList(i, numPartitionsToUpdate), null);
           // Mark the corresponding HdfsPartition objects as dirty
           for (org.apache.hadoop.hive.metastore.api.Partition msPartition:
                hmsPartitions.subList(i, numPartitionsToUpdate)) {
@@ -2850,7 +2848,7 @@ public class CatalogOpExecutor {
               
partition.getSd().setSerdeInfo(msTbl.getSd().getSerdeInfo().deepCopy());
               partition.getSd().setLocation(msTbl.getSd().getLocation() + "/" +
                   partName.substring(0, partName.length() - 1));
-              MetaStoreUtils.updatePartitionStatsFast(partition, warehouse);
+      MetaStoreUtils.updatePartitionStatsFast(partition, warehouse, null);
             }
 
             // First add_partitions and then alter_partitions the successful 
ones with
@@ -2881,7 +2879,7 @@ public class CatalogOpExecutor {
                 }
                 try {
                   msClient.getHiveClient().alter_partitions(tblName.getDb(),
-                      tblName.getTbl(), cachedHmsParts);
+                      tblName.getTbl(), cachedHmsParts, null);
                 } catch (Exception e) {
                   LOG.error("Failed in alter_partitions: ", e);
                   // Try to uncache the partitions when the alteration in the 
HMS failed.

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/service/Frontend.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/service/Frontend.java 
b/fe/src/main/java/org/apache/impala/service/Frontend.java
index 00a3d93..b15044b 100644
--- a/fe/src/main/java/org/apache/impala/service/Frontend.java
+++ b/fe/src/main/java/org/apache/impala/service/Frontend.java
@@ -38,10 +38,10 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.impala.catalog.KuduTable;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hive.service.cli.thrift.TGetColumnsReq;
-import org.apache.hive.service.cli.thrift.TGetFunctionsReq;
-import org.apache.hive.service.cli.thrift.TGetSchemasReq;
-import org.apache.hive.service.cli.thrift.TGetTablesReq;
+import org.apache.hive.service.rpc.thrift.TGetColumnsReq;
+import org.apache.hive.service.rpc.thrift.TGetFunctionsReq;
+import org.apache.hive.service.rpc.thrift.TGetSchemasReq;
+import org.apache.hive.service.rpc.thrift.TGetTablesReq;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/main/java/org/apache/impala/util/MetaStoreUtil.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/util/MetaStoreUtil.java 
b/fe/src/main/java/org/apache/impala/util/MetaStoreUtil.java
index 6968f33..4612906 100644
--- a/fe/src/main/java/org/apache/impala/util/MetaStoreUtil.java
+++ b/fe/src/main/java/org/apache/impala/util/MetaStoreUtil.java
@@ -67,7 +67,7 @@ public class MetaStoreUtil {
     // Get the value from the Hive configuration, if present.
     HiveConf hiveConf = new HiveConf(HdfsTable.class);
     String strValue = hiveConf.get(
-        HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_TABLE_PARTITION_MAX.toString());
+        HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_OBJECTS_MAX.toString());
     if (strValue != null) {
       try {
         maxPartitionsPerRpc_ = Short.parseShort(strValue);

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java 
b/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
index 38aff7d..afdfea5 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
@@ -30,9 +30,9 @@ import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
-import org.apache.hive.service.cli.thrift.TGetColumnsReq;
-import org.apache.hive.service.cli.thrift.TGetSchemasReq;
-import org.apache.hive.service.cli.thrift.TGetTablesReq;
+import org.apache.hive.service.rpc.thrift.TGetColumnsReq;
+import org.apache.hive.service.rpc.thrift.TGetSchemasReq;
+import org.apache.hive.service.rpc.thrift.TGetTablesReq;
 import org.apache.sentry.provider.common.ResourceAuthorizationProvider;
 import org.apache.sentry.provider.file.LocalGroupResourceAuthorizationProvider;
 import org.junit.After;

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/ParserTest.java 
b/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
index 8b8ea42..93a7f17 100644
--- a/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
@@ -2729,7 +2729,7 @@ public class ParserTest {
     // may have unquoted identifiers corresponding to keywords.
     for (String keyword: SqlScanner.keywordMap.keySet()) {
       // Skip keywords that are not valid field/column names in the Metastore.
-      if (!MetaStoreUtils.validateName(keyword)) continue;
+      if (!MetaStoreUtils.validateName(keyword, null)) continue;
       String structType = "STRUCT<" + keyword + ":INT>";
       TypeDefsParseOk(structType);
     }

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/51c6fde3/fe/src/test/java/org/apache/impala/service/FrontendTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/service/FrontendTest.java 
b/fe/src/test/java/org/apache/impala/service/FrontendTest.java
index dfbdb12..7cd1b56 100644
--- a/fe/src/test/java/org/apache/impala/service/FrontendTest.java
+++ b/fe/src/test/java/org/apache/impala/service/FrontendTest.java
@@ -25,12 +25,12 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.hive.service.cli.thrift.TGetCatalogsReq;
-import org.apache.hive.service.cli.thrift.TGetColumnsReq;
-import org.apache.hive.service.cli.thrift.TGetFunctionsReq;
-import org.apache.hive.service.cli.thrift.TGetInfoReq;
-import org.apache.hive.service.cli.thrift.TGetSchemasReq;
-import org.apache.hive.service.cli.thrift.TGetTablesReq;
+import org.apache.hive.service.rpc.thrift.TGetCatalogsReq;
+import org.apache.hive.service.rpc.thrift.TGetColumnsReq;
+import org.apache.hive.service.rpc.thrift.TGetFunctionsReq;
+import org.apache.hive.service.rpc.thrift.TGetInfoReq;
+import org.apache.hive.service.rpc.thrift.TGetSchemasReq;
+import org.apache.hive.service.rpc.thrift.TGetTablesReq;
 import org.junit.Test;
 
 import org.apache.impala.analysis.AuthorizationTest;

Reply via email to