[hive] branch master updated: HIVE-20078 : Remove ATSHook

2020-04-18 Thread hashutosh
This is an automated email from the ASF dual-hosted git repository.

hashutosh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 11abab2  HIVE-20078 : Remove ATSHook
11abab2 is described below

commit 11abab21be0f5fbf6eeb39acbf2963618352b6dd
Author: Ashutosh Chauhan 
AuthorDate: Sat Dec 14 08:50:54 2019 -0800

HIVE-20078 : Remove ATSHook
---
 .../java/org/apache/hadoop/hive/conf/HiveConf.java |   3 -
 .../org/apache/hadoop/hive/ql/hooks/ATSHook.java   | 495 -
 .../apache/hadoop/hive/ql/hooks/TestATSHook.java   |  59 ---
 3 files changed, 557 deletions(-)

diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7b3acad..9e46e7b 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -582,9 +582,6 @@ public class HiveConf extends Configuration {
 "Comma-separated list of statistics publishers to be invoked on 
counters on each job. \n" +
 "A client stats publisher is specified as the name of a Java class 
which implements the \n" +
 "org.apache.hadoop.hive.ql.stats.ClientStatsPublisher interface."),
-ATSHOOKQUEUECAPACITY("hive.ats.hook.queue.capacity", 64,
-"Queue size for the ATS Hook executor. If the number of outstanding 
submissions \n" +
-"to the ATS executor exceed this amount, the Hive ATS Hook will not 
try to log queries to ATS."),
 EXECPARALLEL("hive.exec.parallel", false, "Whether to execute jobs in 
parallel"),
 EXECPARALLETHREADNUMBER("hive.exec.parallel.thread.number", 8,
 "How many jobs at most can be executed in parallel"),
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java 
b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
deleted file mode 100644
index 0632f6e..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ /dev/null
@@ -1,495 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.hooks;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.llap.registry.impl.LlapRegistryService;
-import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.QueryState;
-import org.apache.hadoop.hive.ql.exec.ExplainTask;
-import org.apache.hadoop.hive.ql.exec.TaskFactory;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.tez.TezTask;
-import org.apache.hadoop.hive.ql.log.PerfLogger;
-import org.apache.hadoop.hive.ql.parse.ExplainConfiguration;
-import org.apache.hadoop.hive.ql.plan.ExplainWork;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
-import org.apache.hadoop.yarn.client.api.TimelineClient;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hive.common.util.ShutdownHookManager;
-import org.apache.tez.dag.api.TezConfiguration;
-import org.json.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-
-/**
- * ATSHook sends query + plan info to Yarn App Timeline Server. To enable 
(hadoop 2.4 

svn commit: r1876705 - in /hive/cms/trunk/content: downloads.mdtext javadoc.mdtext

2020-04-18 Thread gates
Author: gates
Date: Sat Apr 18 22:39:18 2020
New Revision: 1876705

URL: http://svn.apache.org/viewvc?rev=1876705&view=rev
Log:
Updated website for 2.3.7 release.

Modified:
hive/cms/trunk/content/downloads.mdtext
hive/cms/trunk/content/javadoc.mdtext

Modified: hive/cms/trunk/content/downloads.mdtext
URL: 
http://svn.apache.org/viewvc/hive/cms/trunk/content/downloads.mdtext?rev=1876705&r1=1876704&r2=1876705&view=diff
==
--- hive/cms/trunk/content/downloads.mdtext (original)
+++ hive/cms/trunk/content/downloads.mdtext Sat Apr 18 22:39:18 2020
@@ -11,6 +11,10 @@ directory.
 
 
 ## News
+### 18 April 2020: release 2.3.7 available
+This release works with Hadoop 2.x.y.
+You can look at the complete [JIRA change log for this release][HIVE_2_3_7_CL].
+
 ### 26 August 2019: release 3.1.2 available
 This release works with Hadoop 3.x.y.
 You can look at the complete [JIRA change log for this release][HIVE_3_1_2_CL].
@@ -154,6 +158,7 @@ You can look at the complete [JIRA chang
 
 [HIVE_DL]: http://www.apache.org/dyn/closer.cgi/hive/
 [HIVE_3_1_2_CL]: 
https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12344397&styleName=Html&projectId=12310843
+[HIVE_2_3_7_CL]: 
https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12346056&styleName=Text&projectId=12310843
 [HIVE_2_3_6_CL]: 
https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12345603&styleName=Text&projectId=12310843
 [HIVE_2_3_5_CL]: 
https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12345394&styleName=Text&projectId=12310843
 [HIVE_2_3_4_CL]: 
https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12344319&styleName=Text&projectId=12310843

Modified: hive/cms/trunk/content/javadoc.mdtext
URL: 
http://svn.apache.org/viewvc/hive/cms/trunk/content/javadoc.mdtext?rev=1876705&r1=1876704&r2=1876705&view=diff
==
--- hive/cms/trunk/content/javadoc.mdtext (original)
+++ hive/cms/trunk/content/javadoc.mdtext Sat Apr 18 22:39:18 2020
@@ -4,7 +4,7 @@ Recent versions:
 
   * [Hive 3.1.2 Javadocs][r3.1.2]
   * [Hive 3.0.0 Javadocs][r3.0.0]
-  * [Hive 2.3.6 Javadocs][r2.3.6]
+  * [Hive 2.3.7 Javadocs][r2.3.7]
   * [Hive 2.2.0 Javadocs][r2.2.0]
   * [Hive 2.1.1 Javadocs][r2.1.1]
   * [Hive 1.2.2 Javadocs][r1.2.2]
@@ -20,7 +20,7 @@ javadoc and sources jars for use in an I
 
 [r3.1.2]: /javadocs/r3.1.2/api/index.html
 [r3.0.0]: /javadocs/r3.0.0/api/index.html
-[r2.3.6]: /javadocs/r2.3.6/api/index.html
+[r2.3.7]: /javadocs/r2.3.7/api/index.html
 [r2.2.0]: /javadocs/r2.2.0/api/index.html
 [r2.1.1]: /javadocs/r2.1.1/api/index.html
 [r1.2.2]: /javadocs/r1.2.2/api/index.html




svn commit: r1059518 - in /websites/staging/hive/trunk/content: ./ downloads.html javadoc.html

2020-04-18 Thread buildbot
Author: buildbot
Date: Sat Apr 18 22:39:25 2020
New Revision: 1059518

Log:
Staging update by buildbot for hive

Modified:
websites/staging/hive/trunk/content/   (props changed)
websites/staging/hive/trunk/content/downloads.html
websites/staging/hive/trunk/content/javadoc.html

Propchange: websites/staging/hive/trunk/content/
--
--- cms:source-revision (original)
+++ cms:source-revision Sat Apr 18 22:39:25 2020
@@ -1 +1 @@
-1865996
+1876705

Modified: websites/staging/hive/trunk/content/downloads.html
==
--- websites/staging/hive/trunk/content/downloads.html (original)
+++ websites/staging/hive/trunk/content/downloads.html Sat Apr 18 22:39:25 2020
@@ -124,6 +124,9 @@ h2:hover > .headerlink, h3:hover > .head
 guaranteed to be stable. For stable releases, look in the stable
 directory.
 News
+18 April 2020: release 2.3.7 
available
+This release works with Hadoop 2.x.y.
+You can look at the complete https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12346056&styleName=Text&projectId=12310843;>JIRA
 change log for this release.
 26 August 2019: release 3.1.2 
available
 This release works with Hadoop 3.x.y.
You can look at the complete https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12344397&styleName=Html&projectId=12310843;>JIRA
 change log for this release.

Modified: websites/staging/hive/trunk/content/javadoc.html
==
--- websites/staging/hive/trunk/content/javadoc.html (original)
+++ websites/staging/hive/trunk/content/javadoc.html Sat Apr 18 22:39:25 2020
@@ -122,7 +122,7 @@ h2:hover > .headerlink, h3:hover > .head
 
 Hive 3.1.2 Javadocs
 Hive 3.0.0 Javadocs
-Hive 2.3.6 Javadocs
+Hive 2.3.7 Javadocs
 Hive 2.2.0 Javadocs
 Hive 2.1.1 Javadocs
 Hive 1.2.2 Javadocs




svn commit: r1059514 - in /websites/production/hive/content/javadocs: r2.3.6/ r2.3.7/ r2.3.7/api/ r2.3.7/api/org/ r2.3.7/api/org/apache/ r2.3.7/api/org/apache/hadoop/ r2.3.7/api/org/apache/hadoop/fs/

2020-04-18 Thread gates
Author: gates
Date: Sat Apr 18 20:50:03 2020
New Revision: 1059514

Log:
Added javadoc for 2.3.7, removed 2.3.6 javadoc.


[This commit notification would consist of 4090 parts, 
which exceeds the limit of 50 ones, so it was shortened to the summary.]


[hive] branch master updated: HIVE-22684 : Run Eclipse Cleanup Against hbase-handler Module (David Mollitor via Ashutosh Chauhan)

2020-04-18 Thread hashutosh
This is an automated email from the ASF dual-hosted git repository.

hashutosh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new c3fb689  HIVE-22684 : Run Eclipse Cleanup Against hbase-handler Module 
(David Mollitor via Ashutosh Chauhan)
c3fb689 is described below

commit c3fb689ffbac7f8dd45ee5219145d8c3d231e27f
Author: David Mollitor 
AuthorDate: Sat Apr 18 12:31:07 2020 -0700

HIVE-22684 : Run Eclipse Cleanup Against hbase-handler Module (David 
Mollitor via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan 
---
 .../org/apache/hadoop/hive/hbase/HBaseScanRange.java   |  1 +
 .../java/org/apache/hadoop/hive/hbase/HBaseSerDe.java  |  1 -
 .../apache/hadoop/hive/hbase/HBaseSerDeParameters.java |  1 +
 .../apache/hadoop/hive/hbase/HBaseStorageHandler.java  |  1 -
 .../hadoop/hive/hbase/HiveHBaseTableOutputFormat.java  |  2 ++
 .../hive/hbase/HiveHBaseTableSnapshotInputFormat.java  |  3 ---
 .../hive/hbase/struct/AvroHBaseValueFactory.java   |  1 -
 .../org/apache/hadoop/hive/hbase/TestHBaseSerDe.java   | 11 +--
 .../apache/hadoop/hive/hbase/TestLazyHBaseObject.java  | 18 --
 .../org/apache/hadoop/hive/hbase/avro/Address.java |  5 -
 .../org/apache/hadoop/hive/hbase/avro/ContactInfo.java |  3 +++
 .../org/apache/hadoop/hive/hbase/avro/Employee.java|  3 +++
 .../org/apache/hadoop/hive/hbase/avro/HomePhone.java   |  3 +++
 .../org/apache/hadoop/hive/hbase/avro/OfficePhone.java |  3 +++
 14 files changed, 33 insertions(+), 23 deletions(-)

diff --git 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanRange.java 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanRange.java
index f01748c..79d687f 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanRange.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanRange.java
@@ -87,6 +87,7 @@ public class HBaseScanRange implements Serializable {
 scan.setFilter(new FilterList(filters));
   }
 
+  @Override
   public String toString() {
 return (startRow == null ? "" : new BytesWritable(startRow).toString()) + 
" ~ " +
 (stopRow == null ? "" : new BytesWritable(stopRow).toString());
diff --git 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
index 1588283..5147d0e 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
@@ -30,7 +30,6 @@ import 
org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
-import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
diff --git 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
index eb3560c..480484c 100644
--- 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
+++ 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
@@ -167,6 +167,7 @@ public class HBaseSerDeParameters {
 throw new IllegalArgumentException("Invalid column name " + columnName);
   }
 
+  @Override
   public String toString() {
 return "[" + columnMappingString + ":" + getColumnNames() + ":" + 
getColumnTypes() + "]";
   }
diff --git 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
index f3735a3..16658d0 100644
--- 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
+++ 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
@@ -42,7 +42,6 @@ import 
org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
 import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
diff --git 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
index b344e16..f0eccd7 100644
--- 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
+++ 

[hive] branch master updated: HIVE-22698 : Support Statement#closeOnCompletion() (Iwao Ave via Ashutosh Chauhan)

2020-04-18 Thread hashutosh
This is an automated email from the ASF dual-hosted git repository.

hashutosh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new bb95ad2  HIVE-22698 : Support Statement#closeOnCompletion() (Iwao Ave 
via Ashutosh Chauhan)
bb95ad2 is described below

commit bb95ad243cc0ab028deed516b5f36616d9fd3354
Author: Iwao AVE 
AuthorDate: Sat Apr 18 12:26:07 2020 -0700

HIVE-22698 : Support Statement#closeOnCompletion() (Iwao Ave via Ashutosh 
Chauhan)

Signed-off-by: Ashutosh Chauhan 
---
 .../java/org/apache/hive/jdbc/TestJdbcDriver2.java | 37 ++
 .../org/apache/hive/jdbc/HiveQueryResultSet.java   |  1 +
 .../java/org/apache/hive/jdbc/HiveStatement.java   | 12 +--
 3 files changed, 48 insertions(+), 2 deletions(-)

diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index dbe282d..ba1f39c 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -3250,4 +3250,41 @@ public class TestJdbcDriver2 {
   public void testConnectInvalidDatabase() throws SQLException {
 DriverManager.getConnection("jdbc:hive2:///databasedoesnotexist", "", "");
   }
+
+  @Test
+  public void testStatementCloseOnCompletion() throws SQLException {
+Statement stmt = con.createStatement();
+stmt.closeOnCompletion();
+ResultSet res = stmt.executeQuery("select under_col from " + tableName + " 
limit 1");
+assertTrue(res.next());
+assertFalse(stmt.isClosed());
+assertFalse(res.next());
+assertFalse(stmt.isClosed());
+res.close();
+assertTrue(stmt.isClosed());
+  }
+
+  @Test
+  public void testPreparedStatementCloseOnCompletion() throws SQLException {
+PreparedStatement stmt = con.prepareStatement("select under_col from " + 
tableName + " limit 1");
+stmt.closeOnCompletion();
+ResultSet res = stmt.executeQuery();
+assertTrue(res.next());
+assertFalse(stmt.isClosed());
+assertFalse(res.next());
+assertFalse(stmt.isClosed());
+res.close();
+assertTrue(stmt.isClosed());
+  }
+
+  @Test
+  public void testCloseOnAlreadyOpenedResultSetCompletion() throws Exception {
+PreparedStatement stmt = con.prepareStatement("select under_col from " + 
tableName + " limit 1");
+ResultSet res = stmt.executeQuery();
+assertTrue(res.next());
+stmt.closeOnCompletion();
+assertFalse(stmt.isClosed());
+res.close();
+assertTrue(stmt.isClosed());
+  }
 }
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
index 8563cee..df31a25 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
@@ -276,6 +276,7 @@ public class HiveQueryResultSet extends HiveBaseResultSet {
 if (this.statement != null && (this.statement instanceof HiveStatement)) {
   HiveStatement s = (HiveStatement) this.statement;
   s.closeClientOperation();
+  s.closeOnResultSetCompletion();
 } else {
   // for those stmtHandle passed from HiveDatabaseMetaData instead of 
Statement
   closeOperationHandle(stmtHandle);
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
index 543bf8c..a74a3a8 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
@@ -76,6 +76,7 @@ public class HiveStatement implements java.sql.Statement {
   private final int defaultFetchSize;
   private boolean isScrollableResultset = false;
   private boolean isOperationComplete = false;
+  private boolean closeOnResultSetCompletion = false;
   /**
* We need to keep a reference to the result set to support the following:
* 
@@ -233,6 +234,13 @@ public class HiveStatement implements java.sql.Statement {
 stmtHandle = null;
   }
 
+  void closeOnResultSetCompletion() throws SQLException {
+if (closeOnResultSetCompletion) {
+  resultSet = null;
+  close();
+}
+  }
+
   /*
* (non-Javadoc)
*
@@ -254,7 +262,7 @@ public class HiveStatement implements java.sql.Statement {
 
   // JDK 1.7
   public void closeOnCompletion() throws SQLException {
-throw new SQLFeatureNotSupportedException("Method not supported");
+closeOnResultSetCompletion = true;
   }
 
   /*
@@ -752,7 +760,7 @@ public class HiveStatement implements java.sql.Statement {
 
   // JDK 1.7
   public boolean isCloseOnCompletion() throws SQLException {
-return false;
+return closeOnResultSetCompletion;
   }
 
   /*



[hive] branch master updated: HIVE-23051 : Clean up BucketCodec (David Mollitor via Ashutosh Chauhan)

2020-04-18 Thread hashutosh
This is an automated email from the ASF dual-hosted git repository.

hashutosh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new f888527  HIVE-23051 : Clean up BucketCodec (David Mollitor via 
Ashutosh Chauhan)
f888527 is described below

commit f888527feb3cd912850b4c62cf52bc191558bd7a
Author: David Mollitor 
AuthorDate: Sat Apr 18 11:15:52 2020 -0700

HIVE-23051 : Clean up BucketCodec (David Mollitor via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan 
---
 .../org/apache/hadoop/hive/ql/io/BucketCodec.java  |  50 +-
 .../apache/hadoop/hive/ql/io/TestBucketCodec.java  | 101 +
 2 files changed, 127 insertions(+), 24 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketCodec.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketCodec.java
index eb9ded7..10d9604 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketCodec.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketCodec.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.io;
 
+import com.google.common.base.Preconditions;
+
 /**
  * This class makes sense of {@link RecordIdentifier#getBucketProperty()}.  Up 
until ASF Hive 3.0 this
  * field was simply the bucket ID.  Since 3.0 it does bit packing to store 
several things:
@@ -86,50 +88,50 @@ public enum BucketCodec {
 }
 @Override
 public int encode(AcidOutputFormat.Options options) {
-  int statementId = options.getStatementId() >= 0 ? 
options.getStatementId() : 0;
+  final int statementId = options.getStatementId();
+  final int bucketId = options.getBucketId();
+
+  Preconditions.checkArgument(bucketId >= 0 && bucketId <= MAX_BUCKET_ID, 
"Bucket ID out of range: " + bucketId);
+  Preconditions.checkArgument(statementId >= -1 && statementId <= 
MAX_STATEMENT_ID,
+  "Statement ID out of range: " + statementId);
 
-  assert this.version >=0 && this.version <= MAX_VERSION
-: "Version out of range: " + version;
-  if(!(options.getBucketId() >= 0 && options.getBucketId() <= 
MAX_BUCKET_ID)) {
-throw new IllegalArgumentException("bucketId out of range: " + 
options.getBucketId());
-  }
-  if(!(statementId >= 0 && statementId <= MAX_STATEMENT_ID)) {
-throw new IllegalArgumentException("statementId out of range: " + 
statementId);
-  }
-  return this.version << (1 + NUM_BUCKET_ID_BITS + 4 + 
NUM_STATEMENT_ID_BITS) |
-options.getBucketId() << (4 + NUM_STATEMENT_ID_BITS) | statementId;
+  return this.version << (1 + NUM_BUCKET_ID_BITS + 4 + 
NUM_STATEMENT_ID_BITS)
+  | options.getBucketId() << (4 + NUM_STATEMENT_ID_BITS) | Math.max(0, 
statementId);
 }
   };
   private static final int TOP3BITS_MASK = 
0b1110_______;
   private static final int NUM_VERSION_BITS = 3;
   private static final int NUM_BUCKET_ID_BITS = 12;
   private static final int NUM_STATEMENT_ID_BITS = 12;
-  private static final int MAX_VERSION = (1 << NUM_VERSION_BITS) - 1;
+  public static final int MAX_VERSION = (1 << NUM_VERSION_BITS) - 1;
   public static final int MAX_BUCKET_ID = (1 << NUM_BUCKET_ID_BITS) - 1;
-  private static final int MAX_STATEMENT_ID = (1 << NUM_STATEMENT_ID_BITS) - 1;
+  public static final int MAX_STATEMENT_ID = (1 << NUM_STATEMENT_ID_BITS) - 1;
 
   public static BucketCodec determineVersion(int bucket) {
-assert 7 << 29 == BucketCodec.TOP3BITS_MASK;
-//look at top 3 bits and return appropriate enum
 try {
+  // look at top 3 bits and return appropriate enum
   return getCodec((BucketCodec.TOP3BITS_MASK & bucket) >>> 29);
-}
-catch(IllegalArgumentException ex) {
-  throw new IllegalArgumentException(ex.getMessage() + " Cannot decode 
version from " + bucket);
+} catch (IllegalArgumentException iae) {
+  throw new IllegalArgumentException("Cannot decode version from bucket 
number: " + Integer.toHexString(bucket),
+  iae);
 }
   }
+
   public static BucketCodec getCodec(int version) {
 switch (version) {
-  case 0:
-return BucketCodec.V0;
-  case 1:
-return BucketCodec.V1;
-  default:
-throw new IllegalArgumentException("Illegal 'bucket' format. Version=" 
+ version);
+case 0:
+  return BucketCodec.V0;
+case 1:
+  return BucketCodec.V1;
+default:
+  throw new IllegalArgumentException("Illegal 'bucket' format. Version=" + 
version);
 }
   }
+
   final int version;
+
   BucketCodec(int version) {
+Preconditions.checkPositionIndex(version, MAX_VERSION, "Version out of 
range: " + version);
 this.version = version;
   }
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestBucketCodec.java 
b/ql/src/test/org/apache/hadoop/hive/ql/io/TestBucketCodec.java
new file mode 100644
index 000..a5b843f
--- /dev/null
+++ 

[hive] branch master updated: HIVE-23239 : Remove snakeyaml lib from Hive distribution via transitive dependency (Roohi Syeda via Ashutosh Chauhan)

2020-04-18 Thread hashutosh
This is an automated email from the ASF dual-hosted git repository.

hashutosh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 9940cab  HIVE-23239 : Remove snakeyaml lib from Hive distribution via 
transitive dependency (Roohi Syeda via Ashutosh Chauhan)
9940cab is described below

commit 9940cab2805527c567f55760f9856b0f9bf98b67
Author: Roohi Syeda 
AuthorDate: Sat Apr 18 10:54:32 2020 -0700

HIVE-23239 : Remove snakeyaml lib from Hive distribution via transitive 
dependency (Roohi Syeda via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan 
---
 kafka-handler/pom.xml | 8 
 llap-server/pom.xml   | 4 
 ql/pom.xml| 8 
 3 files changed, 20 insertions(+)

diff --git a/kafka-handler/pom.xml b/kafka-handler/pom.xml
index 4e58cb9..0ad3973 100644
--- a/kafka-handler/pom.xml
+++ b/kafka-handler/pom.xml
@@ -68,6 +68,10 @@
   org.slf4j
   slf4j-api
 
+
+  org.yaml
+  snakeyaml
+
   
 
 
@@ -134,6 +138,10 @@
   test
   
 
+  org.yaml
+  snakeyaml
+
+
   org.apache.avro
   avro
 
diff --git a/llap-server/pom.xml b/llap-server/pom.xml
index e03de9c..3d4e3da 100644
--- a/llap-server/pom.xml
+++ b/llap-server/pom.xml
@@ -129,6 +129,10 @@
   commons-logging
   commons-logging
 
+ 
+  org.yaml
+  snakeyaml
+
   
 
 
diff --git a/ql/pom.xml b/ql/pom.xml
index d1846c9..a0e77a1 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -386,6 +386,10 @@
   jackson-core
 
 
+  org.yaml
+  snakeyaml
+
+
   org.apache.calcite.avatica
   avatica-core
 
@@ -399,6 +403,10 @@
   org.apache.calcite.avatica
   avatica-core
 
+
+  org.yaml
+  snakeyaml
+
   
 
 



[hive] branch master updated (15ebf9e -> a3f3df0)

2020-04-18 Thread hashutosh
This is an automated email from the ASF dual-hosted git repository.

hashutosh pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 15ebf9e  HIVE-23194 : Use Queue Instead of List for CollectOperator 
(David Mollitor via Ashutosh Chauhan)
 new bc225fc  HIVE-23153 : deregister from zookeeper is not properly worked 
on kerberized environment (Eugene Chung via Ashutosh Chauhan)
 new a3f3df0  HIVE-23196 : Reduce number of delete calls to NN during 
Context::clear (Attila Magyar via Ashutosh Chauhan)

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 ql/src/java/org/apache/hadoop/hive/ql/Context.java | 23 --
 .../apache/hive/service/server/HiveServer2.java| 19 +-
 2 files changed, 35 insertions(+), 7 deletions(-)



[hive] 02/02: HIVE-23196 : Reduce number of delete calls to NN during Context::clear (Attila Magyar via Ashutosh Chauhan)

2020-04-18 Thread hashutosh
This is an automated email from the ASF dual-hosted git repository.

hashutosh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit a3f3df0f97d857482a3042feca940251c4557673
Author: Attila Magyar 
AuthorDate: Sat Apr 18 10:44:47 2020 -0700

HIVE-23196 : Reduce number of delete calls to NN during Context::clear 
(Attila Magyar via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan 
---
 ql/src/java/org/apache/hadoop/hive/ql/Context.java | 23 --
 1 file changed, 21 insertions(+), 2 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
index d618ef9..9f59d4c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -23,6 +23,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
+import java.util.Collection;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -671,6 +672,10 @@ public class Context {
 for (Map.Entry entry : fsScratchDirs.entrySet()) {
   try {
 Path p = entry.getValue();
+if (p.toUri().getPath().contains(stagingDir) && subDirOf(p, 
fsScratchDirs.values())  ) {
+  LOG.debug("Skip deleting stagingDir: " + p);
+  continue; // staging dir is deleted when deleting the scratch dir
+}
 if(resultCacheDir == null || 
!p.toUri().getPath().contains(resultCacheDir)) {
   // delete only the paths which aren't result cache dir path
   // because that will be taken care by removeResultCacheDir
@@ -687,6 +692,15 @@ public class Context {
 fsScratchDirs.clear();
   }
 
+  private boolean subDirOf(Path path, Collection parents) {
+for (Path each : parents) {
+  if (!path.equals(each) && FileUtils.isPathWithinSubtree(path, each)) {
+return true;
+  }
+}
+return false;
+  }
+
   /**
* Remove any created directories for CTEs.
*/
@@ -843,7 +857,7 @@ public class Context {
   subContext.clear();
 }
 // Then clear this context
-  if (resDir != null) {
+  if (resDir != null && !isInScratchDir(resDir)) { // resDir is inside the 
scratch dir, removeScratchDir will take care of removing it
 try {
   FileSystem fs = resDir.getFileSystem(conf);
   LOG.debug("Deleting result dir: {}", resDir);
@@ -853,7 +867,7 @@ public class Context {
 }
   }
 
-if (resFile != null) {
+if (resFile != null && !isInScratchDir(resFile.getParent())) { // resFile 
is inside the scratch dir, removeScratchDir will take care of removing it
   try {
 FileSystem fs = resFile.getFileSystem(conf);
 LOG.debug("Deleting result file: {}",  resFile);
@@ -871,6 +885,11 @@ public class Context {
 setNeedLockMgr(false);
   }
 
+  private boolean isInScratchDir(Path path) {
+return path.toUri().getPath().startsWith(localScratchDir)
+  || 
path.toUri().getPath().startsWith(nonLocalScratchPath.toUri().getPath());
+  }
+
   public DataInput getStream() {
 try {
   if (!initialized) {



[hive] 01/02: HIVE-23153 : deregister from zookeeper is not properly worked on kerberized environment (Eugene Chung via Ashutosh Chauhan)

2020-04-18 Thread hashutosh
This is an automated email from the ASF dual-hosted git repository.

hashutosh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit bc225fcb0cd9c21ba38a466f98488b74d6c83d13
Author: Eugene Chung 
AuthorDate: Fri Apr 17 21:16:48 2020 -0700

HIVE-23153 : deregister from zookeeper is not properly worked on kerberized 
environment (Eugene Chung via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan 
---
 .../org/apache/hive/service/server/HiveServer2.java   | 19 ++-
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java 
b/service/src/java/org/apache/hive/service/server/HiveServer2.java
index 42b7e59..d600f3a 100644
--- a/service/src/java/org/apache/hive/service/server/HiveServer2.java
+++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java
@@ -115,6 +115,7 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.http.util.EntityUtils;
 import org.apache.logging.log4j.util.Strings;
+import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
 import org.apache.zookeeper.ZooDefs.Ids;
@@ -133,10 +134,12 @@ import 
com.google.common.util.concurrent.ThreadFactoryBuilder;
  *
  */
 public class HiveServer2 extends CompositeService {
-  private static CountDownLatch deleteSignal;
   private static final Logger LOG = LoggerFactory.getLogger(HiveServer2.class);
   public static final String INSTANCE_URI_CONFIG = "hive.server2.instance.uri";
   private static final int SHUTDOWN_TIME = 60;
+  private static CountDownLatch zkDeleteSignal;
+  private static volatile KeeperException.Code zkDeleteResultCode;
+
   private CLIService cliService;
   private ThriftCLIService thriftCLIService;
   private CuratorFramework zKClientForPrivSync = null;
@@ -566,7 +569,7 @@ public class HiveServer2 extends CompositeService {
* @return
* @throws Exception
*/
-  private void setUpZooKeeperAuth(HiveConf hiveConf) throws Exception {
+  private static void setUpZooKeeperAuth(HiveConf hiveConf) throws Exception {
 if (ZookeeperUtils.isKerberosEnabled(hiveConf)) {
   String principal = 
hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
   if (principal.isEmpty()) {
@@ -1099,6 +1102,7 @@ public class HiveServer2 extends CompositeService {
*/
   static void deleteServerInstancesFromZooKeeper(String versionNumber) throws 
Exception {
 HiveConf hiveConf = new HiveConf();
+setUpZooKeeperAuth(hiveConf);
 CuratorFramework zooKeeperClient = 
hiveConf.getZKConfig().getNewZookeeperClient();
 zooKeeperClient.start();
 String rootNamespace = 
hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE);
@@ -1109,7 +1113,7 @@ public class HiveServer2 extends CompositeService {
 // Now for each path that is for the given versionNumber, delete the znode 
from ZooKeeper
 for (int i = 0; i < znodePaths.size(); i++) {
   String znodePath = znodePaths.get(i);
-  deleteSignal = new CountDownLatch(1);
+  zkDeleteSignal = new CountDownLatch(1);
   if (znodePath.contains("version=" + versionNumber + ";")) {
 String fullZnodePath =
 ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + rootNamespace
@@ -1119,7 +1123,11 @@ public class HiveServer2 extends CompositeService {
 zooKeeperClient.delete().guaranteed().inBackground(new 
DeleteCallBack())
 .forPath(fullZnodePath);
 // Wait for the delete to complete
-deleteSignal.await();
+zkDeleteSignal.await();
+final KeeperException.Code rc = HiveServer2.zkDeleteResultCode;
+if (rc != KeeperException.Code.OK) {
+  throw KeeperException.create(rc);
+}
 // Get the updated path list
 znodePathsUpdated =
 zooKeeperClient.getChildren().forPath(
@@ -1138,7 +1146,8 @@ public class HiveServer2 extends CompositeService {
 public void processResult(CuratorFramework zooKeeperClient, CuratorEvent 
event)
 throws Exception {
   if (event.getType() == CuratorEventType.DELETE) {
-deleteSignal.countDown();
+zkDeleteResultCode = KeeperException.Code.get(event.getResultCode());
+zkDeleteSignal.countDown();
   }
 }
   }