hive git commit: Addendum to HIVE-18553 : Support schema evolution in Parquet Vectorization reader. Removes extra q.out file

2018-02-15 Thread vihangk1
Repository: hive
Updated Branches:
  refs/heads/master 634f71d11 -> 01f34e49b


Addendum to HIVE-18553 : Support schema evolution in Parquet Vectorization 
reader. Removes extra q.out file


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/01f34e49
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/01f34e49
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/01f34e49

Branch: refs/heads/master
Commit: 01f34e49b352bd06ad8e65a1da613de45773c1c6
Parents: 634f71d
Author: Vihang Karajgaonkar 
Authored: Thu Feb 15 17:04:44 2018 -0800
Committer: Vihang Karajgaonkar 
Committed: Thu Feb 15 17:04:53 2018 -0800

--
 .../schema_evol_par_vec_table.q.out | 357 ---
 1 file changed, 357 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/01f34e49/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out
--
diff --git a/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out 
b/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out
deleted file mode 100644
index a6128b6..000
--- a/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out
+++ /dev/null
@@ -1,357 +0,0 @@
-PREHOOK: query: drop table test_alter
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table test_alter
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: drop table test_alter2
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table test_alter2
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: drop table test_alter3
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table test_alter3
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table test_alter (id string) stored as parquet
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@test_alter
-POSTHOOK: query: create table test_alter (id string) stored as parquet
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@test_alter
-PREHOOK: query: insert into test_alter values ('1'), ('2'), ('3')
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@test_alter
-POSTHOOK: query: insert into test_alter values ('1'), ('2'), ('3')
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@test_alter
-POSTHOOK: Lineage: test_alter.id SCRIPT []
-PREHOOK: query: select * from test_alter
-PREHOOK: type: QUERY
-PREHOOK: Input: default@test_alter
- A masked pattern was here 
-POSTHOOK: query: select * from test_alter
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@test_alter
- A masked pattern was here 
-1
-2
-3
-PREHOOK: query: alter table test_alter add columns (newCol string)
-PREHOOK: type: ALTERTABLE_ADDCOLS
-PREHOOK: Input: default@test_alter
-PREHOOK: Output: default@test_alter
-POSTHOOK: query: alter table test_alter add columns (newCol string)
-POSTHOOK: type: ALTERTABLE_ADDCOLS
-POSTHOOK: Input: default@test_alter
-POSTHOOK: Output: default@test_alter
-PREHOOK: query: select * from test_alter
-PREHOOK: type: QUERY
-PREHOOK: Input: default@test_alter
- A masked pattern was here 
-POSTHOOK: query: select * from test_alter
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@test_alter
- A masked pattern was here 
-1  NULL
-2  NULL
-3  NULL
-PREHOOK: query: insert into test_alter values ('4', '100')
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@test_alter
-POSTHOOK: query: insert into test_alter values ('4', '100')
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@test_alter
-POSTHOOK: Lineage: test_alter.id SCRIPT []
-POSTHOOK: Lineage: test_alter.newcol SCRIPT []
-PREHOOK: query: select * from test_alter
-PREHOOK: type: QUERY
-PREHOOK: Input: default@test_alter
- A masked pattern was here 
-POSTHOOK: query: select * from test_alter
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@test_alter
- A masked pattern was here 
-1  NULL
-2  NULL
-3  NULL
-4  100
-PREHOOK: query: alter table test_alter replace columns (id string)
-PREHOOK: type: ALTERTABLE_REPLACECOLS
-PREHOOK: Input: default@test_alter
-PREHOOK: Output: default@test_alter
-POSTHOOK: query: alter table test_alter replace columns (id string)
-POSTHOOK: type: ALTERTABLE_REPLACECOLS
-POSTHOOK: Input: default@test_alter
-POSTHOOK: Output: default@test_alter
-PREHOOK: query: select * from test_alter
-PREHOOK: type: QUERY
-PREHOOK: Input: default@test_alter
- A masked pattern was here 
-POSTHOOK: query: select * from test_alter
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@test_alter
- A masked pattern was here 
-1
-2
-3
-4
-PREHOOK: query: alter table test_alter replace columns (id s

[1/2] hive git commit: HIVE-18588 Categorized standalone-metastore tests so only some run as part of 'mvn test'. All are run as part of CI (Alan Gates, reviewed by Peter Vary)

2018-02-15 Thread gates
Repository: hive
Updated Branches:
  refs/heads/master fd561935d -> 634f71d11


http://git-wip-us.apache.org/repos/asf/hive/blob/634f71d1/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestDropPartitions.java
--
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestDropPartitions.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestDropPartitions.java
index 2ed7ec0..e550bca 100644
--- 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestDropPartitions.java
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestDropPartitions.java
@@ -27,6 +27,7 @@ import java.util.stream.Collectors;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.PartitionDropOptions;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -43,6 +44,7 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
@@ -52,6 +54,7 @@ import com.google.common.collect.Lists;
  * Tests for dropping partitions.
  */
 @RunWith(Parameterized.class)
+@Category(MetastoreCheckinTest.class)
 public class TestDropPartitions {
 
   // Needed until there is no junit release with @BeforeParam, @AfterParam 
(junit 4.13)

http://git-wip-us.apache.org/repos/asf/hive/blob/634f71d1/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestExchangePartitions.java
--
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestExchangePartitions.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestExchangePartitions.java
index 5a7aeb7..3a06aec 100644
--- 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestExchangePartitions.java
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestExchangePartitions.java
@@ -28,6 +28,7 @@ import java.util.stream.Collectors;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -45,6 +46,7 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
@@ -54,6 +56,7 @@ import com.google.common.collect.Lists;
  * Tests for exchanging partitions.
  */
 @RunWith(Parameterized.class)
+@Category(MetastoreCheckinTest.class)
 public class TestExchangePartitions {
 
   // Needed until there is no junit release with @BeforeParam, @AfterParam 
(junit 4.13)

http://git-wip-us.apache.org/repos/asf/hive/blob/634f71d1/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestFunctions.java
--
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestFunctions.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestFunctions.java
index f3b7ce5..1974399 100644
--- 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestFunctions.java
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestFunctions.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.metastore.client;
 
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.FunctionType;
@@ -39,6 +40,7 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
@@ -50,6 +52,7 @@ import java.util.stream.Collectors;
  * Test class for IMetaStoreClient API. Testing the Function related functions.
  */
 @RunWith(Parameterized.class)
+@Category(MetastoreCheckinTest.class)
 public cla

[2/2] hive git commit: HIVE-18588 Categorized standalone-metastore tests so only some run as part of 'mvn test'. All are run as part of CI (Alan Gates, reviewed by Peter Vary)

2018-02-15 Thread gates
HIVE-18588 Categorized standalone-metastore tests so only some run as part of 
'mvn test'.  All are run as part of CI (Alan Gates, reviewed by Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/634f71d1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/634f71d1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/634f71d1

Branch: refs/heads/master
Commit: 634f71d11af7bfd07926452c60064fbba47d6a1f
Parents: fd56193
Author: Alan Gates 
Authored: Thu Feb 15 16:20:24 2018 -0800
Committer: Alan Gates 
Committed: Thu Feb 15 16:20:24 2018 -0800

--
 standalone-metastore/DEV-README | 23 ++
 standalone-metastore/pom.xml| 47 +++-
 .../hadoop/hive/common/TestStatsSetupConst.java |  4 ++
 .../ndv/fm/TestFMSketchSerialization.java   |  3 ++
 .../hive/common/ndv/hll/TestHLLNoBias.java  |  3 ++
 .../common/ndv/hll/TestHLLSerialization.java|  3 ++
 .../hive/common/ndv/hll/TestHyperLogLog.java|  3 ++
 .../common/ndv/hll/TestHyperLogLogDense.java|  3 ++
 .../common/ndv/hll/TestHyperLogLogSparse.java   |  3 ++
 .../common/ndv/hll/TestSparseEncodeHash.java|  3 ++
 .../hadoop/hive/metastore/TestAdminUser.java|  3 ++
 .../hive/metastore/TestAggregateStatsCache.java |  3 ++
 .../hadoop/hive/metastore/TestDeadline.java |  3 ++
 .../metastore/TestEmbeddedHiveMetaStore.java|  3 ++
 .../hadoop/hive/metastore/TestFilterHooks.java  |  3 ++
 .../hive/metastore/TestHiveAlterHandler.java|  3 ++
 .../metastore/TestHiveMetaStoreGetMetaConf.java |  3 ++
 .../TestHiveMetaStorePartitionSpecs.java|  3 ++
 .../metastore/TestHiveMetaStoreTimeout.java |  3 ++
 .../hive/metastore/TestHiveMetaStoreTxns.java   |  3 ++
 ...TestHiveMetaStoreWithEnvironmentContext.java |  3 ++
 .../hive/metastore/TestHiveMetastoreCli.java|  3 ++
 .../hive/metastore/TestLockRequestBuilder.java  |  3 ++
 .../hive/metastore/TestMarkPartition.java   |  3 ++
 .../hive/metastore/TestMarkPartitionRemote.java |  3 ++
 .../TestMetaStoreConnectionUrlHook.java |  3 ++
 .../TestMetaStoreEndFunctionListener.java   |  3 ++
 .../metastore/TestMetaStoreEventListener.java   |  3 ++
 .../TestMetaStoreEventListenerOnlyOnCommit.java |  3 ++
 .../TestMetaStoreEventListenerWithOldConf.java  |  3 ++
 .../metastore/TestMetaStoreInitListener.java|  3 ++
 .../metastore/TestMetaStoreListenersError.java  |  3 ++
 .../metastore/TestMetaStoreSchemaFactory.java   |  3 ++
 .../hive/metastore/TestMetaStoreSchemaInfo.java |  3 ++
 .../hadoop/hive/metastore/TestObjectStore.java  |  3 ++
 .../metastore/TestObjectStoreInitRetry.java |  3 ++
 .../hadoop/hive/metastore/TestOldSchema.java|  3 ++
 .../TestPartitionNameWhitelistValidation.java   |  3 ++
 .../hive/metastore/TestRawStoreProxy.java   |  3 ++
 .../hive/metastore/TestRemoteHiveMetaStore.java |  3 ++
 .../TestRemoteHiveMetaStoreIpAddress.java   |  3 ++
 .../TestRemoteUGIHiveMetaStoreIpAddress.java|  3 ++
 .../TestRetriesInRetryingHMSHandler.java|  3 ++
 .../hive/metastore/TestRetryingHMSHandler.java  |  3 ++
 .../metastore/TestSetUGIOnBothClientServer.java |  3 ++
 .../hive/metastore/TestSetUGIOnOnlyClient.java  |  3 ++
 .../hive/metastore/TestSetUGIOnOnlyServer.java  |  3 ++
 .../annotation/MetastoreCheckinTest.java| 25 +++
 .../metastore/annotation/MetastoreTest.java | 24 ++
 .../metastore/annotation/MetastoreUnitTest.java | 25 +++
 .../hive/metastore/cache/TestCachedStore.java   |  3 ++
 .../client/TestAddAlterDropIndexes.java |  3 ++
 .../metastore/client/TestAddPartitions.java |  3 ++
 .../client/TestAddPartitionsFromPartSpec.java   |  3 ++
 .../metastore/client/TestAlterPartitions.java   |  3 ++
 .../metastore/client/TestAppendPartitions.java  |  3 ++
 .../hive/metastore/client/TestDatabases.java|  3 ++
 .../metastore/client/TestDropPartitions.java|  3 ++
 .../client/TestExchangePartitions.java  |  3 ++
 .../hive/metastore/client/TestFunctions.java|  3 ++
 .../metastore/client/TestGetListIndexes.java|  3 ++
 .../metastore/client/TestGetPartitions.java |  3 ++
 .../hive/metastore/client/TestGetTableMeta.java |  3 ++
 .../metastore/client/TestListPartitions.java|  3 ++
 .../TestTablesCreateDropAlterTruncate.java  |  3 ++
 .../metastore/client/TestTablesGetExists.java   |  3 ++
 .../hive/metastore/client/TestTablesList.java   |  3 ++
 .../hive/metastore/conf/TestMetastoreConf.java  |  3 ++
 .../TestDataSourceProviderFactory.java  |  3 ++
 .../json/TestJSONMessageDeserializer.java   |  3 ++
 .../hive/metastore/metrics/TestMetrics.java |  3 ++
 .../tools/TestMetastoreSchemaTool.java  |  3 ++
 .../tools/TestSchemaToolForMetastore.java   |  3 ++
 .../metastore/txn/TestTxnHandlerNegative.java   |  3 ++
 .../hadoop/hive/metastore/

hive git commit: HIVE-18672: Printed state in RemoteSparkJobMonitor is ambiguous (Sahil Takiar, reviewed by Peter Vary)

2018-02-15 Thread stakiar
Repository: hive
Updated Branches:
  refs/heads/master 217811254 -> fd561935d


HIVE-18672: Printed state in RemoteSparkJobMonitor is ambiguous (Sahil Takiar, 
reviewed by Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fd561935
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fd561935
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fd561935

Branch: refs/heads/master
Commit: fd561935d4d1e34f25bc4c851a8acf37ec7879a8
Parents: 2178112
Author: Sahil Takiar 
Authored: Thu Feb 15 15:01:36 2018 -0800
Committer: Sahil Takiar 
Committed: Thu Feb 15 15:01:36 2018 -0800

--
 .../exec/spark/status/RemoteSparkJobMonitor.java  | 18 +-
 .../ql/exec/spark/status/SparkJobMonitor.java |  2 +-
 2 files changed, 10 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/fd561935/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
index adb65a5..3467ae4 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
@@ -62,10 +62,11 @@ public class RemoteSparkJobMonitor extends SparkJobMonitor {
 perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.SPARK_SUBMIT_TO_RUNNING);
 
 startTime = System.currentTimeMillis();
+JobHandle.State state = null;
 
 while (true) {
   try {
-JobHandle.State state = sparkJobStatus.getRemoteJobState();
+state = sparkJobStatus.getRemoteJobState();
 Preconditions.checkState(sparkJobStatus.isRemoteActive(), "Connection 
to remote Spark driver was lost");
 
 switch (state) {
@@ -76,14 +77,13 @@ public class RemoteSparkJobMonitor extends SparkJobMonitor {
 HiveException he = new 
HiveException(ErrorMsg.SPARK_JOB_MONITOR_TIMEOUT,
 Long.toString(timeCount));
 console.printError(he.getMessage());
-console.printError("Status: " + state);
 sparkJobStatus.setError(he);
 running = false;
 done = true;
 rc = 2;
   }
   if (LOG.isDebugEnabled()) {
-console.printInfo("state = " + state);
+console.printInfo("Spark job state = " + state );
   }
   break;
 case STARTED:
@@ -98,8 +98,7 @@ public class RemoteSparkJobMonitor extends SparkJobMonitor {
   console.printInfo("\nQuery Hive on Spark job[" + 
sparkJobStatus.getJobId() +
   "] stages: " + 
Arrays.toString(sparkJobStatus.getStageIds()));
 
-  console.printInfo("\nStatus: Running (Hive on Spark job["
-+ sparkJobStatus.getJobId() + "])");
+  console.printInfo("Spark job[" + sparkJobStatus.getJobId() + "] 
status = RUNNING");
   running = true;
 
   String format = "Job Progress Format\nCurrentTime 
StageId_StageAttemptId: "
@@ -142,8 +141,8 @@ public class RemoteSparkJobMonitor extends SparkJobMonitor {
   printStatus(progressMap, lastProgressMap);
   lastProgressMap = progressMap;
   double duration = (System.currentTimeMillis() - startTime) / 1000.0;
-  console.printInfo("Status: Finished successfully in "
-+ String.format("%.2f seconds", duration));
+  console.printInfo("Spark job[" + sparkJobStatus.getJobId() + "] 
finished successfully in "
++ String.format("%.2f second(s)", duration));
   running = false;
   done = true;
   break;
@@ -176,7 +175,7 @@ public class RemoteSparkJobMonitor extends SparkJobMonitor {
   rc = 3;
   break;
 case CANCELLED:
-  console.printInfo("Status: Cancelled");
+  console.printInfo("Spark job[" + sparkJobStatus.getJobId() + " was 
cancelled");
   running = false;
   done = true;
   rc = 3;
@@ -193,7 +192,8 @@ public class RemoteSparkJobMonitor extends SparkJobMonitor {
   finalException = new HiveException(e, 
ErrorMsg.SPARK_JOB_INTERRUPTED);
   LOG.warn("Interrupted while monitoring the Hive on Spark 
application, exiting");
 } else {
-  String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
+  String msg = " with exception '" + Utilities.getNameMessage(e) + "' 
Last known state = " +
+  (state != null ? state.name() : "UNKNOWN");
   msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + 
msg;
 
   // Has 

hive git commit: HIVE-18721 : Bucket Map Join : Handle empty buckets (Deepak Jaiswal, reviewed by Gunther Hagleitner)

2018-02-15 Thread djaiswal
Repository: hive
Updated Branches:
  refs/heads/master dabb62d6e -> 217811254


HIVE-18721 : Bucket Map Join : Handle empty buckets (Deepak Jaiswal, reviewed 
by Gunther Hagleitner)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/21781125
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/21781125
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/21781125

Branch: refs/heads/master
Commit: 21781125419826bd1ae76d980f958419a84d84f3
Parents: dabb62d
Author: Deepak Jaiswal 
Authored: Thu Feb 15 12:26:07 2018 -0800
Committer: Deepak Jaiswal 
Committed: Thu Feb 15 12:26:54 2018 -0800

--
 .../test/resources/testconfiguration.properties |   1 +
 .../hive/ql/exec/tez/CustomPartitionEdge.java   |   7 ++
 .../clientpositive/bucket_map_join_tez_empty.q  |  18 +++
 .../llap/bucket_map_join_tez_empty.q.out| 121 +++
 4 files changed, 147 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/21781125/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index c2252f3..942f97a 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -506,6 +506,7 @@ minillaplocal.query.files=\
   bucket_many.q,\
   bucket_map_join_tez1.q,\
   bucket_map_join_tez2.q,\
+  bucket_map_join_tez_empty.q,\
   bucketizedhiveinputformat.q,\
   bucketmapjoin6.q,\
   bucketmapjoin7.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/21781125/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java
index 1ac1d14..4248cd9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.tez;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -84,6 +85,12 @@ public class CustomPartitionEdge extends EdgeManagerPlugin {
   @Override
   public void routeDataMovementEventToDestination(DataMovementEvent event,
    int sourceTaskIndex, int sourceOutputIndex, Map<Integer, List<Integer>> 
mapDestTaskIndices) {
+if (conf.getRoutingTable().get(sourceOutputIndex).size() == 0) {
+  // No task for given input, return empty list with -1 as index
+  mapDestTaskIndices.put(-1, new ArrayList<>());
+  return;
+}
+// Normal case.
 List<Integer> outputIndices = Collections.singletonList(sourceTaskIndex);
 for (Integer destIndex : conf.getRoutingTable().get(sourceOutputIndex)) {
   mapDestTaskIndices.put(destIndex, outputIndices);

http://git-wip-us.apache.org/repos/asf/hive/blob/21781125/ql/src/test/queries/clientpositive/bucket_map_join_tez_empty.q
--
diff --git a/ql/src/test/queries/clientpositive/bucket_map_join_tez_empty.q 
b/ql/src/test/queries/clientpositive/bucket_map_join_tez_empty.q
new file mode 100644
index 000..cc43b5b
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/bucket_map_join_tez_empty.q
@@ -0,0 +1,18 @@
+set hive.mapred.mode=nonstrict;
+set hive.explain.user=false;
+set hive.auto.convert.join=true;
+set hive.auto.convert.join.noconditionaltask=true;
+set hive.auto.convert.join.noconditionaltask.size=1;
+
+CREATE TABLE tab1(key1 int, value string) CLUSTERED BY (key1) INTO 10 BUCKETS 
STORED AS TEXTFILE;
+CREATE TABLE tab2 (key1 int, value string) CLUSTERED BY (key1) INTO 10 BUCKETS 
STORED AS TEXTFILE;
+
+
+-- HIVE-18721 : Make sure only certain buckets have data.
+insert into tab1 VALUES (1,"abc"),(4,"def"),(8, "ghi");
+insert into tab2 VALUES (1, "abc"), (5, "aa");
+
+set hive.convert.join.bucket.mapjoin.tez = true;
+
+explain select * from tab1, tab2 where tab1.key1 = tab2.key1;
+select * from tab1, tab2 where tab1.key1 = tab2.key1;

http://git-wip-us.apache.org/repos/asf/hive/blob/21781125/ql/src/test/results/clientpositive/llap/bucket_map_join_tez_empty.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez_empty.q.out 
b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez_empty.q.out
new file mode 100644
index 000..33825da
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez_empty.q.out
@@ -0,0 +1,1

hive git commit: HIVE-18638: Triggers for multi-pool move, failing to initiate the move event (Prasanth Jayachandran reviewed by Sergey Shelukhin)

2018-02-15 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/master 974d41902 -> dabb62d6e


HIVE-18638: Triggers for multi-pool move, failing to initiate the move event 
(Prasanth Jayachandran reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/dabb62d6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/dabb62d6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/dabb62d6

Branch: refs/heads/master
Commit: dabb62d6e66f747516e976610692febe07494fe8
Parents: 974d419
Author: Prasanth Jayachandran 
Authored: Thu Feb 15 11:12:33 2018 -0800
Committer: Prasanth Jayachandran 
Committed: Thu Feb 15 11:12:33 2018 -0800

--
 .../hive/ql/exec/tez/TezSessionPoolManager.java|  2 +-
 .../hive/ql/exec/tez/TezSessionPoolSession.java|  2 ++
 .../hadoop/hive/ql/exec/tez/WorkloadManager.java   | 17 +++--
 3 files changed, 14 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/dabb62d6/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java
index d0b32b8..46cfe56 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java
@@ -179,7 +179,7 @@ public class TezSessionPoolManager extends 
TezSessionPoolSession.AbstractTrigger
 }
   }
 
-  public void initTriggers(final HiveConf conf) throws HiveException {
+  public void initTriggers(final HiveConf conf) {
 if (triggerValidatorRunnable == null) {
   final long triggerValidationIntervalMs = HiveConf.getTimeVar(conf, 
ConfVars
 .HIVE_TRIGGER_VALIDATION_INTERVAL, TimeUnit.MILLISECONDS);

http://git-wip-us.apache.org/repos/asf/hive/blob/dabb62d6/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolSession.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolSession.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolSession.java
index 13b0a30..d1b3fec 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolSession.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolSession.java
@@ -76,6 +76,7 @@ class TezSessionPoolSession extends TezSessionState {
 Runnable triggerValidatorRunnable = getTriggerValidatorRunnable();
 
scheduledExecutorService.scheduleWithFixedDelay(triggerValidatorRunnable, 
triggerValidationIntervalMs,
   triggerValidationIntervalMs, TimeUnit.MILLISECONDS);
+LOG.info("Started trigger validator with interval: {} ms", 
triggerValidationIntervalMs);
   }
 }
 
@@ -83,6 +84,7 @@ class TezSessionPoolSession extends TezSessionState {
   if (scheduledExecutorService != null) {
 scheduledExecutorService.shutdownNow();
 scheduledExecutorService = null;
+LOG.info("Stopped trigger validator");
   }
 }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/dabb62d6/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java
index 25922d9..00e2c20 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java
@@ -241,6 +241,7 @@ public class WorkloadManager extends 
TezSessionPoolSession.AbstractTriggerValida
   }
 
   public void start() throws Exception {
+initTriggers();
 tezAmPool.start();
 if (expirationTracker != null) {
   expirationTracker.start();
@@ -249,13 +250,17 @@ public class WorkloadManager extends 
TezSessionPoolSession.AbstractTriggerValida
   amComm.start();
 }
 allocationManager.start();
+  }
 
-final long triggerValidationIntervalMs = HiveConf.getTimeVar(conf,
-  HiveConf.ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL, 
TimeUnit.MILLISECONDS);
-TriggerActionHandler triggerActionHandler = new 
KillMoveTriggerActionHandler(this);
-triggerValidatorRunnable = new 
PerPoolTriggerValidatorRunnable(perPoolProviders, triggerActionHandler,
-  triggerValidationIntervalMs);
-startTriggerValidator(triggerValidationIntervalMs);
+  private void initTriggers() {
+if (triggerValidatorRunnable == null) {
+  final long triggerValidationIntervalMs = HiveConf.getTimeVar(conf,
+HiveConf.ConfVars.HIVE_TRIGGER_VALIDATION_INT

[2/2] hive git commit: HIVE-18421 : Vectorized execution handles overflows in a different manner than non-vectorized execution (Vihang Karajgaonkar, reviewed by Sahil Takiar)

2018-02-15 Thread vihangk1
HIVE-18421 : Vectorized execution handles overflows in a different manner than 
non-vectorized execution (Vihang Karajgaonkar, reviewed by Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/974d4190
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/974d4190
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/974d4190

Branch: refs/heads/master
Commit: 974d4190235477ff3a8d908e899421ba0c4117c3
Parents: 6a26871
Author: Vihang Karajgaonkar 
Authored: Sun Jan 14 09:47:26 2018 -0800
Committer: Vihang Karajgaonkar 
Committed: Thu Feb 15 08:06:53 2018 -0800

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |7 +-
 .../VectorizedArithmeticBench.java  |   22 +
 .../ColumnArithmeticColumn.txt  |   16 +-
 .../ColumnArithmeticScalar.txt  |   15 +-
 .../ExpressionTemplates/ColumnDivideColumn.txt  |   14 +
 .../ExpressionTemplates/ColumnUnaryMinus.txt|   15 +
 .../ScalarArithmeticColumn.txt  |   14 +
 .../vectorization/TestTemplates/TestClass.txt   |1 +
 ...erationVectorExpressionCheckedEvaluation.txt |   65 +
 ...erationVectorExpressionCheckedEvaluation.txt |   60 +
 .../exec/vector/VectorExpressionDescriptor.java |   21 +-
 .../ql/exec/vector/VectorizationContext.java|   10 +-
 .../LongColModuloLongColumnChecked.java |   51 +
 .../exec/vector/expressions/OverflowUtils.java  |  119 ++
 .../expressions/PosModDoubleToDouble.java   |   22 +-
 .../vector/expressions/PosModLongToLong.java|   35 +-
 .../vector/expressions/VectorExpression.java|   11 +
 .../hive/ql/udf/generic/GenericUDFOPMinus.java  |6 +
 .../hive/ql/udf/generic/GenericUDFOPMod.java|7 +
 .../ql/udf/generic/GenericUDFOPMultiply.java|6 +
 .../ql/udf/generic/GenericUDFOPNegative.java|5 +-
 .../hive/ql/udf/generic/GenericUDFOPPlus.java   |5 +
 .../exec/vector/expressions/TestUnaryMinus.java |   33 +
 .../TestVectorArithmeticExpressions.java|   90 +-
 .../expressions/TestVectorMathFunctions.java|   45 +
 .../vectorization_numeric_overflows.q   |  158 +++
 .../vectorization_numeric_overflows.q.out   | 1150 ++
 .../apache/hadoop/hive/tools/GenVectorCode.java |  121 +-
 .../hadoop/hive/tools/GenVectorTestCode.java|  142 ++-
 29 files changed, 2228 insertions(+), 38 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/974d4190/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index f3980b6..3d777f9 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2954,7 +2954,10 @@ public class HiveConf extends Configuration {
 "The default value is true."),
 
HIVE_VECTORIZATION_ROW_IDENTIFIER_ENABLED("hive.vectorized.row.identifier.enabled",
 true,
 "This flag should be set to true to enable vectorization of ROW__ID."),
-
+
HIVE_VECTORIZATION_USE_CHECKED_EXPRESSIONS("hive.vectorized.use.checked.expressions",
 false,
+"This flag should be set to true to use overflow checked vector 
expressions when available.\n" +
+"For example, arithmetic expressions which can overflow the output 
data type can be evaluated using\n" +
+" checked vector expressions so that they produce same result as 
non-vectorized evaluation."),
 HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED(
 "hive.vectorized.input.format.supports.enabled",
 "decimal_64",
@@ -2965,7 +2968,7 @@ public class HiveConf extends Configuration {
 
HIVE_TEST_VECTORIZATION_ENABLED_OVERRIDE("hive.test.vectorized.execution.enabled.override",
 "none", new StringSet("none", "enable", "disable"),
 "internal use only, used to override the 
hive.vectorized.execution.enabled setting and\n" +
-"turn off vectorization.  The default is false, or course",
+"turn off vectorization.  The default is false, of course",
 true),
 
 HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, "This property 
has been extended to control "

http://git-wip-us.apache.org/repos/asf/hive/blob/974d4190/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/VectorizedArithmeticBench.java
--
diff --git 
a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/VectorizedArithmeticBench.java
 
b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/VectorizedArithmeticBench.java
index 8016630..70ee9b7 100644
--- 
a/itests/hive-jmh/src/main/jav

[1/2] hive git commit: HIVE-18421 : Vectorized execution handles overflows in a different manner than non-vectorized execution (Vihang Karajgaonkar, reviewed by Sahil Takiar)

2018-02-15 Thread vihangk1
Repository: hive
Updated Branches:
  refs/heads/master 6a268713f -> 974d41902


http://git-wip-us.apache.org/repos/asf/hive/blob/974d4190/ql/src/test/results/clientpositive/vectorization_numeric_overflows.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/vectorization_numeric_overflows.q.out 
b/ql/src/test/results/clientpositive/vectorization_numeric_overflows.q.out
new file mode 100644
index 000..344db2b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vectorization_numeric_overflows.q.out
@@ -0,0 +1,1150 @@
+PREHOOK: query: CREATE TABLE test_overflow (
+ctinyint1 TINYINT,
+ctinyint2 TINYINT,
+csmallint1 SMALLINT,
+csmallint2 SMALLINT,
+cint1 INT,
+cint2 INT,
+cbigint1 BIGINT,
+cbigint2 BIGINT,
+cfloat1 FLOAT,
+cfloat2 FLOAT,
+cdouble1 DOUBLE,
+cdouble2 DOUBLE)
+STORED AS PARQUET
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_overflow
+POSTHOOK: query: CREATE TABLE test_overflow (
+ctinyint1 TINYINT,
+ctinyint2 TINYINT,
+csmallint1 SMALLINT,
+csmallint2 SMALLINT,
+cint1 INT,
+cint2 INT,
+cbigint1 BIGINT,
+cbigint2 BIGINT,
+cfloat1 FLOAT,
+cfloat2 FLOAT,
+cdouble1 DOUBLE,
+cdouble2 DOUBLE)
+STORED AS PARQUET
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_overflow
+PREHOOK: query: insert into test_overflow values (-128, 127, -32768, 32767, 
-2147483648, 2147483647, -9223372036854775808, 9223372036854775807, 
1.401298464324817E-45, 3.4028234663852886E38, 4.9E-324, 1.7976931348623157E308)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_overflow
+POSTHOOK: query: insert into test_overflow values (-128, 127, -32768, 32767, 
-2147483648, 2147483647, -9223372036854775808, 9223372036854775807, 
1.401298464324817E-45, 3.4028234663852886E38, 4.9E-324, 1.7976931348623157E308)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_overflow
+POSTHOOK: Lineage: test_overflow.cbigint1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cbigint2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cdouble1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cdouble2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cfloat1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cfloat2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cint1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cint2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.csmallint1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.csmallint2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.ctinyint1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.ctinyint2 SCRIPT []
+PREHOOK: query: insert into test_overflow values (127, -128, 32767, -32768, 
2147483647, -2147483648, 9223372036854775807, -9223372036854775808, 
3.4028234663852886E38, 1.401298464324817E-45, 1.7976931348623157E308, 4.9E-324)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_overflow
+POSTHOOK: query: insert into test_overflow values (127, -128, 32767, -32768, 
2147483647, -2147483648, 9223372036854775807, -9223372036854775808, 
3.4028234663852886E38, 1.401298464324817E-45, 1.7976931348623157E308, 4.9E-324)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_overflow
+POSTHOOK: Lineage: test_overflow.cbigint1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cbigint2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cdouble1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cdouble2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cfloat1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cfloat2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cint1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cint2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.csmallint1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.csmallint2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.ctinyint1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.ctinyint2 SCRIPT []
+PREHOOK: query: insert into test_overflow values (64, 65, 32767, -32768, 
1073741824, 1073741825, 9223372036854775807, -9223372036854775808, 
3.4028234663852886E38, 1.401298464324817E-45, 1.7976931348623157E308, 4.9E-324)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_overflow
+POSTHOOK: query: insert into test_overflow values (64, 65, 32767, -32768, 
1073741824, 1073741825, 9223372036854775807, -9223372036854775808, 
3.4028234663852886E38, 1.401298464324817E-45, 1.7976931348623157E308, 4.9E-324)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_overflow
+POSTHOOK: Lineage: test_overflow.cbigint1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cbigint2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cdouble1 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cdouble2 SCRIPT []
+POSTHOOK: Lineage: test_overflow.cfloat1 SC

hive git commit: HIVE-18717 : Avoid transitive dependency on jetty 6.x (Ashutosh Chauhan via Zoltan Haindrich)

2018-02-15 Thread hashutosh
Repository: hive
Updated Branches:
  refs/heads/master 7ddac02b8 -> 6a268713f


HIVE-18717 : Avoid transitive dependency on jetty 6.x (Ashutosh Chauhan via 
Zoltan Haindrich)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6a268713
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6a268713
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6a268713

Branch: refs/heads/master
Commit: 6a268713fe9b26b03647907db316734e492010e9
Parents: 7ddac02
Author: Ashutosh Chauhan 
Authored: Wed Feb 14 17:51:03 2018 -0800
Committer: Ashutosh Chauhan 
Committed: Thu Feb 15 08:06:12 2018 -0800

--
 hcatalog/core/pom.xml | 12 +++-
 hcatalog/hcatalog-pig-adapter/pom.xml | 12 +++-
 llap-server/pom.xml   |  8 
 llap-tez/pom.xml  |  8 
 pom.xml   | 30 +++---
 ql/pom.xml|  8 
 storage-api/pom.xml   |  2 +-
 7 files changed, 74 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6a268713/hcatalog/core/pom.xml
--
diff --git a/hcatalog/core/pom.xml b/hcatalog/core/pom.xml
index 560e438..b0269cd 100644
--- a/hcatalog/core/pom.xml
+++ b/hcatalog/core/pom.xml
@@ -235,7 +235,17 @@
   ${pig.version}
   h2
   test
-
+ 
+  
+org.mortbay.jetty
+jetty-util
+  
+
+  org.mortbay.jetty
+  jetty
+
+
+   
   
 
   

http://git-wip-us.apache.org/repos/asf/hive/blob/6a268713/hcatalog/hcatalog-pig-adapter/pom.xml
--
diff --git a/hcatalog/hcatalog-pig-adapter/pom.xml 
b/hcatalog/hcatalog-pig-adapter/pom.xml
index c50a4d5..4a2075f 100644
--- a/hcatalog/hcatalog-pig-adapter/pom.xml
+++ b/hcatalog/hcatalog-pig-adapter/pom.xml
@@ -66,7 +66,17 @@
   pig
   ${pig.version}
   h2
-
+ 
+  
+org.mortbay.jetty
+jetty-util
+  
+
+  org.mortbay.jetty
+  jetty
+
+
+   
 
   org.apache.hadoop
   hadoop-hdfs

http://git-wip-us.apache.org/repos/asf/hive/blob/6a268713/llap-server/pom.xml
--
diff --git a/llap-server/pom.xml b/llap-server/pom.xml
index eabe1a8..65cb78f 100644
--- a/llap-server/pom.xml
+++ b/llap-server/pom.xml
@@ -206,7 +206,15 @@
   slider-core
   ${slider.version}
   
+ 
+  org.mortbay.jetty
+  jetty
+
 
+  org.mortbay.jetty
+  jetty-util
+
+   
   asm
   asm
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6a268713/llap-tez/pom.xml
--
diff --git a/llap-tez/pom.xml b/llap-tez/pom.xml
index 69fbea3..50865ad 100644
--- a/llap-tez/pom.xml
+++ b/llap-tez/pom.xml
@@ -154,7 +154,15 @@
   ${tez.version}
   true
   
+  
+  org.mortbay.jetty
+  jetty-util
+
 
+  org.mortbay.jetty
+  jetty
+
+   
   org.slf4j
   slf4j-log4j12
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6a268713/pom.xml
--
diff --git a/pom.xml b/pom.xml
index e220891..a242fbf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -488,7 +488,17 @@
 avro-mapred
 hadoop2
 ${avro.version}
-  
+
+  
+org.mortbay.jetty
+jetty-util
+  
+
+  org.mortbay.jetty
+  jetty
+
+
+ 
   
 org.apache.derby
 derby
@@ -538,7 +548,17 @@
 org.apache.pig
 pig
 ${pig.version}
-  
+ 
+  
+org.mortbay.jetty
+jetty-util
+  
+
+  org.mortbay.jetty
+  jetty
+
+
+ 
   
 org.apache.thrift
 libfb303
@@ -719,7 +739,11 @@
 hadoop-common
 ${hadoop.version}
 
-  
+   
+org.mortbay.jetty
+jetty-sslengine
+  
+ 
 org.slf4j
 slf4j-log4j12
   

http://git-wip-us.apache.org/repos/asf/hive/blob/6a268713/ql/pom.xml
--
diff --git a/ql/pom.xml b/ql/pom.xml
index 2d1034c..5f917c1 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -655,7 +655,15 @@
   true
   test
   
+   
+  org.mortbay.jetty
+