[hive] branch branch-3 updated: HIVE-27678: Backport HIVE-26127 to branch-3 (Vihang Karajgaonkar reviewed by Ayush Saxena)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new 4cd5142aa4e HIVE-27678: Backport HIVE-26127 to branch-3 (Vihang Karajgaonkar reviewed by Ayush Saxena) 4cd5142aa4e is described below commit 4cd5142aa4e2e1795593ba03305da21fd4dd6fb1 Author: Vihang Karajgaonkar <27933586+vihan...@users.noreply.github.com> AuthorDate: Sat Sep 9 22:18:23 2023 -0700 HIVE-27678: Backport HIVE-26127 to branch-3 (Vihang Karajgaonkar reviewed by Ayush Saxena) --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 024fc64d924..0e0cfbc531d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -4278,7 +4278,7 @@ private void constructOneLBLocationMap(FileStatus fSta, // But not sure why we changed not to delete the oldPath in HIVE-8750 if it is // not the destf or its subdir? isOldPathUnderDestf = isSubDir(oldPath, destPath, oldFs, destFs, false); - if (isOldPathUnderDestf) { + if (isOldPathUnderDestf && oldFs.exists(oldPath)) { cleanUpOneDirectoryForReplace(oldPath, oldFs, pathFilter, conf, purge, isNeedRecycle); } } catch (IOException e) {
[hive] branch branch-3 updated: HIVE-27543 : Backport of HIVE-24039 (Aman Raj, reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new 09a7467d749 HIVE-27543 : Backport of HIVE-24039 (Aman Raj, reviewed by Vihang Karajgaonkar) 09a7467d749 is described below commit 09a7467d749fc2abe6d1d0ef26d3d2d1e55df5a1 Author: Aman Raj <104416558+amanraj2...@users.noreply.github.com> AuthorDate: Sun Aug 6 02:55:57 2023 +0530 HIVE-27543 : Backport of HIVE-24039 (Aman Raj, reviewed by Vihang Karajgaonkar) --- service/src/resources/hive-webapps/static/js/jquery.min.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/service/src/resources/hive-webapps/static/js/jquery.min.js b/service/src/resources/hive-webapps/static/js/jquery.min.js index 4d9b3a25875..47b639702cc 100644 --- a/service/src/resources/hive-webapps/static/js/jquery.min.js +++ b/service/src/resources/hive-webapps/static/js/jquery.min.js @@ -1,2 +1,2 @@ -/*! jQuery v3.3.1 | (c) JS Foundation and other contributors | jquery.org/license */ -!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(e,t){"use strict";var n=[],r=e.document,i=Object.getPrototypeOf,o=n.slice,a=n.concat,s=n.push,u=n.indexOf,l={},c=l.toString,f=l.hasOwnProperty,p=f.toString,d=p.call(Object),h={},g=function e(t){return"function"==typeof [...] +/*! 
jQuery v3.5.0 | (c) JS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l= [...]
[hive] branch branch-3 updated: HIVE-27508 : Backport of HIVE-21584 to branch-3 (Aman Raj, reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new e648a5518a1 HIVE-27508 : Backport of HIVE-21584 to branch-3 (Aman Raj, reviewed by Vihang Karajgaonkar) e648a5518a1 is described below commit e648a5518a1106a87abd2a21da92f63a5e6884fc Author: Aman Raj <104416558+amanraj2...@users.noreply.github.com> AuthorDate: Sat Aug 5 10:42:33 2023 +0530 HIVE-27508 : Backport of HIVE-21584 to branch-3 (Aman Raj, reviewed by Vihang Karajgaonkar) --- .../src/java/org/apache/hive/beeline/Commands.java | 2 +- .../org/apache/hadoop/hive/common/JavaUtils.java | 57 ++--- .../hive/llap/daemon/impl/FunctionLocalizer.java | 18 ++- .../hadoop/hive/ql/exec/AddToClassPathAction.java | 87 + .../hive/ql/exec/SerializationUtilities.java | 120 ++--- .../org/apache/hadoop/hive/ql/exec/Utilities.java | 97 +++--- .../apache/hadoop/hive/ql/exec/mr/ExecDriver.java | 7 +- .../apache/hadoop/hive/ql/exec/mr/ExecMapper.java | 22 +--- .../apache/hadoop/hive/ql/exec/mr/ExecReducer.java | 20 +-- .../hive/ql/exec/spark/SparkRecordHandler.java | 29 ++--- .../hadoop/hive/ql/exec/tez/RecordProcessor.java | 16 +-- .../hadoop/hive/ql/session/SessionState.java | 57 + .../hive/ql/exec/TestAddToClassPathAction.java | 142 + .../hive/spark/client/SparkClientUtilities.java| 23 +++- .../hive/metastore/utils/MetaStoreUtils.java | 20 ++- 15 files changed, 456 insertions(+), 261 deletions(-) diff --git a/beeline/src/java/org/apache/hive/beeline/Commands.java b/beeline/src/java/org/apache/hive/beeline/Commands.java index 851042f3f9d..f14564a81ac 100644 --- a/beeline/src/java/org/apache/hive/beeline/Commands.java +++ b/beeline/src/java/org/apache/hive/beeline/Commands.java @@ -169,7 +169,7 @@ public class Commands { return false; } -URLClassLoader classLoader = (URLClassLoader) Thread.currentThread().getContextClassLoader(); 
+ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); try { beeLine.debug(jarPath + " is added to the local beeline."); URLClassLoader newClassLoader = new URLClassLoader(new URL[]{p.toURL()}, classLoader); diff --git a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java index 54d6b65fa17..93f591fe2a2 100644 --- a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java @@ -18,12 +18,8 @@ package org.apache.hadoop.hive.common; -import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; -import java.io.PrintStream; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; import java.net.URLClassLoader; import java.util.Arrays; import java.util.List; @@ -38,22 +34,6 @@ import org.slf4j.LoggerFactory; */ public final class JavaUtils { private static final Logger LOG = LoggerFactory.getLogger(JavaUtils.class); - private static final Method SUN_MISC_UTIL_RELEASE; - - static { -if (Closeable.class.isAssignableFrom(URLClassLoader.class)) { - SUN_MISC_UTIL_RELEASE = null; -} else { - Method release = null; - try { -Class clazz = Class.forName("sun.misc.ClassLoaderUtil"); -release = clazz.getMethod("releaseLoader", URLClassLoader.class); - } catch (Exception e) { -// ignore - } - SUN_MISC_UTIL_RELEASE = release; -} - } /** * Standard way of getting classloader in Hive code (outside of Hadoop). @@ -91,8 +71,10 @@ public final class JavaUtils { try { closeClassLoader(current); } catch (IOException e) { -LOG.info("Failed to close class loader " + current + -Arrays.toString(((URLClassLoader) current).getURLs()), e); +String detailedMessage = current instanceof URLClassLoader ? 
+Arrays.toString(((URLClassLoader) current).getURLs()) : +""; +LOG.info("Failed to close class loader " + current + " " + detailedMessage, e); } } return true; @@ -108,35 +90,12 @@ public final class JavaUtils { return current == stop; } - // best effort to close - // see https://issues.apache.org/jira/browse/HIVE-3969 for detail public static void closeClassLoader(ClassLoader loader) throws IOException { if (loader instanceof Closeable) { - ((Closeable)loader).close(); -} else if (SUN_MISC_UTIL_RELEASE != null && loader instanceof URLClassLoader) { - PrintStream outputStream = System.out; - ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutput
[hive] branch branch-3.1 updated: HIVE-27508 : Backport of HIVE-21584 to branch-3.1 (Aman Raj, reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3.1 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3.1 by this push: new af7059e2bdc HIVE-27508 : Backport of HIVE-21584 to branch-3.1 (Aman Raj, reviewed by Vihang Karajgaonkar) af7059e2bdc is described below commit af7059e2bdc8b18af42e0b7f7163b923a0bfd424 Author: Aman Raj <104416558+amanraj2...@users.noreply.github.com> AuthorDate: Fri Aug 4 11:59:18 2023 +0530 HIVE-27508 : Backport of HIVE-21584 to branch-3.1 (Aman Raj, reviewed by Vihang Karajgaonkar) --- .../src/java/org/apache/hive/beeline/Commands.java | 2 +- .../org/apache/hadoop/hive/common/JavaUtils.java | 57 ++--- .../hive/llap/daemon/impl/FunctionLocalizer.java | 18 ++- .../hadoop/hive/ql/exec/AddToClassPathAction.java | 87 + .../org/apache/hadoop/hive/ql/exec/Utilities.java | 98 +++--- .../apache/hadoop/hive/ql/exec/mr/ExecDriver.java | 7 +- .../apache/hadoop/hive/ql/exec/mr/ExecMapper.java | 20 +-- .../apache/hadoop/hive/ql/exec/mr/ExecReducer.java | 20 +-- .../hive/ql/exec/spark/SparkRecordHandler.java | 29 ++--- .../hadoop/hive/ql/exec/tez/RecordProcessor.java | 16 +-- .../hadoop/hive/ql/session/SessionState.java | 57 + .../hive/ql/exec/TestAddToClassPathAction.java | 142 + .../hive/spark/client/SparkClientUtilities.java| 23 +++- .../hive/metastore/utils/MetaStoreUtils.java | 20 ++- 14 files changed, 386 insertions(+), 210 deletions(-) diff --git a/beeline/src/java/org/apache/hive/beeline/Commands.java b/beeline/src/java/org/apache/hive/beeline/Commands.java index 851042f3f9d..f14564a81ac 100644 --- a/beeline/src/java/org/apache/hive/beeline/Commands.java +++ b/beeline/src/java/org/apache/hive/beeline/Commands.java @@ -169,7 +169,7 @@ public class Commands { return false; } -URLClassLoader classLoader = (URLClassLoader) Thread.currentThread().getContextClassLoader(); +ClassLoader classLoader = 
Thread.currentThread().getContextClassLoader(); try { beeLine.debug(jarPath + " is added to the local beeline."); URLClassLoader newClassLoader = new URLClassLoader(new URL[]{p.toURL()}, classLoader); diff --git a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java index 54d6b65fa17..93f591fe2a2 100644 --- a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java @@ -18,12 +18,8 @@ package org.apache.hadoop.hive.common; -import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; -import java.io.PrintStream; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; import java.net.URLClassLoader; import java.util.Arrays; import java.util.List; @@ -38,22 +34,6 @@ import org.slf4j.LoggerFactory; */ public final class JavaUtils { private static final Logger LOG = LoggerFactory.getLogger(JavaUtils.class); - private static final Method SUN_MISC_UTIL_RELEASE; - - static { -if (Closeable.class.isAssignableFrom(URLClassLoader.class)) { - SUN_MISC_UTIL_RELEASE = null; -} else { - Method release = null; - try { -Class clazz = Class.forName("sun.misc.ClassLoaderUtil"); -release = clazz.getMethod("releaseLoader", URLClassLoader.class); - } catch (Exception e) { -// ignore - } - SUN_MISC_UTIL_RELEASE = release; -} - } /** * Standard way of getting classloader in Hive code (outside of Hadoop). @@ -91,8 +71,10 @@ public final class JavaUtils { try { closeClassLoader(current); } catch (IOException e) { -LOG.info("Failed to close class loader " + current + -Arrays.toString(((URLClassLoader) current).getURLs()), e); +String detailedMessage = current instanceof URLClassLoader ? 
+Arrays.toString(((URLClassLoader) current).getURLs()) : +""; +LOG.info("Failed to close class loader " + current + " " + detailedMessage, e); } } return true; @@ -108,35 +90,12 @@ public final class JavaUtils { return current == stop; } - // best effort to close - // see https://issues.apache.org/jira/browse/HIVE-3969 for detail public static void closeClassLoader(ClassLoader loader) throws IOException { if (loader instanceof Closeable) { - ((Closeable)loader).close(); -} else if (SUN_MISC_UTIL_RELEASE != null && loader instanceof URLClassLoader) { - PrintStream outputStream = System.out; - ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); - PrintStream newOutputStream = new PrintStream(byteArrayOutpu
[hive] branch branch-3 updated: Hive-27371: Create nightly builds on branch-3 (Junlin Zeng, reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new 580fe2cc654 Hive-27371: Create nightly builds on branch-3 (Junlin Zeng, reviewed by Vihang Karajgaonkar) 580fe2cc654 is described below commit 580fe2cc654423a8d9c6b58261e174c4ed559959 Author: Junlin Zeng <69210845+junlinzeng...@users.noreply.github.com> AuthorDate: Thu May 25 08:36:40 2023 -0700 Hive-27371: Create nightly builds on branch-3 (Junlin Zeng, reviewed by Vihang Karajgaonkar) --- Jenkinsfile | 15 dev-support/nightly | 47 hcatalog/core/pom.xml| 2 +- hcatalog/webhcat/java-client/pom.xml | 2 +- hcatalog/webhcat/svr/pom.xml | 2 +- llap-server/pom.xml | 2 +- metastore/pom.xml| 2 +- pom.xml | 7 ++ ql/pom.xml | 2 +- service/pom.xml | 2 +- standalone-metastore/pom.xml | 7 ++ storage-api/pom.xml | 6 + upgrade-acid/pom.xml | 6 + 13 files changed, 95 insertions(+), 7 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 5c5c239e4f5..a095a5a7b81 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -214,6 +214,21 @@ jobWrappers { } } } + +branches['nightly-check'] = { +executorNode { + stage('Prepare') { + loadWS(); + } + stage('Build') { + sh '''#!/bin/bash + set -e + dev-support/nightly + ''' + buildHive("install -Dtest=noMatches -Pdist -pl packaging -am") + } +} +} parallel branches } } diff --git a/dev-support/nightly b/dev-support/nightly new file mode 100755 index 000..20c83b7e8ad --- /dev/null +++ b/dev-support/nightly @@ -0,0 +1,47 @@ +#!/bin/bash +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e + +DATE="`date +%Y%m%d_%H%M%S`" +HASH="`git rev-parse --short HEAD`" +SUFFIX="nightly-$HASH-$DATE" + +V="`xmlstarlet sel -t -m /_:project/_:version -v . pom.xml`" +NEW_HIVE="${NEW_HIVE:-${V/-*}-$SUFFIX}" +V="`xmlstarlet sel -t -m /_:project/_:version -v . storage-api/pom.xml`" +NEW_SA="${NEW_SA:-${V/-*}-$SUFFIX}" +V="`xmlstarlet sel -t -m /_:project/_:version -v . standalone-metastore/pom.xml`" +NEW_MS="${NEW_MS:-${V/-*}-$SUFFIX}" + + +mvn_versions_set="mvn versions:set versions:commit -DgenerateBackupPoms=false" + +$mvn_versions_set -B -DnewVersion=$NEW_HIVE +$mvn_versions_set -B -DnewVersion=$NEW_SA -pl storage-api +$mvn_versions_set -B -DnewVersion=$NEW_MS -pl standalone-metastore +$mvn_versions_set -B -DnewVersion=$NEW_MS -pl upgrade-acid + +xmlstarlet edit -L -P --update "/_:project/_:properties/_:hive.version" \ + --value $NEW_HIVE standalone-metastore/pom.xml + +xmlstarlet edit -L -P --update "/_:project/_:properties/_:storage-api.version" \ + --value $NEW_SA pom.xml standalone-metastore/pom.xml + +xmlstarlet edit -L -P --update "/_:project/_:properties/_:standalone-metastore.version" \ + --value $NEW_MS pom.xml + + diff --git a/hcatalog/core/pom.xml b/hcatalog/core/pom.xml index 0c454e74300..20ec0461774 100644 --- a/hcatalog/core/pom.xml +++ b/hcatalog/core/pom.xml @@ -70,7 +70,7 @@ org.apache.hive hive-standalone-metastore - ${project.version} + ${standalone-metastore.version} test-jar test diff --git a/hcatalog/webhcat/java-client/pom.xml b/hcatalog/webhcat/java-client/pom.xml index 200b6e1fab7..d41f19c2c83 100644 --- 
a/hcatalog/webhcat/java-client/pom.xml +++ b/hcatalog/webhcat/java-client/pom.xml @@ -83,7 +83,7 @@ org.apache.hive hive-standalone-metastore - ${project.version} + ${standalone-metastore.version} test-jar test diff --git a/hcatalog/webhcat/svr/pom.xml b/hcatalog/webhcat/sv
[hive] branch branch-3 updated: HIVE-27288 : Backport of HIVE-23262 to branch-3 (Aman Raj reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new 7950336706c HIVE-27288 : Backport of HIVE-23262 to branch-3 (Aman Raj reviewed by Vihang Karajgaonkar) 7950336706c is described below commit 7950336706cecabda46a7f9881ee90c3dd315304 Author: Aman Raj <104416558+amanraj2...@users.noreply.github.com> AuthorDate: Fri May 5 05:19:40 2023 +0530 HIVE-27288 : Backport of HIVE-23262 to branch-3 (Aman Raj reviewed by Vihang Karajgaonkar) (#4261) --- hcatalog/conf/jndi.properties | 36 --- hcatalog/server-extensions/pom.xml | 18 -- .../hcatalog/listener/TestMsgBusConnection.java| 120 - .../listener/TestNotificationListener.java | 270 - pom.xml| 17 -- 5 files changed, 461 deletions(-) diff --git a/hcatalog/conf/jndi.properties b/hcatalog/conf/jndi.properties deleted file mode 100644 index f718111242d..000 --- a/hcatalog/conf/jndi.properties +++ /dev/null @@ -1,36 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# If ActiveMQ is used then uncomment following properties, else substitute it accordingly. 
-#java.naming.factory.initial = org.apache.activemq.jndi.ActiveMQInitialContextFactory - -# use the following property to provide location of MQ broker. -#java.naming.provider.url = tcp://localhost:61616 - -# use the following property to specify the JNDI name the connection factory -# should appear as. -#connectionFactoryNames = connectionFactory, queueConnectionFactory, topicConnectionFactry - -# register some queues in JNDI using the form -# queue.[jndiName] = [physicalName] -# queue.MyQueue = example.MyQueue - - -# register some topics in JNDI using the form -# topic.[jndiName] = [physicalName] -# topic.MyTopic = example.MyTopic - diff --git a/hcatalog/server-extensions/pom.xml b/hcatalog/server-extensions/pom.xml index 1fcc5698d8a..afabb3e6bd8 100644 --- a/hcatalog/server-extensions/pom.xml +++ b/hcatalog/server-extensions/pom.xml @@ -93,24 +93,6 @@ ${junit.version} test - - org.apache.activemq - activemq-core - ${activemq.version} - test - - - org.springframework - spring-context - - - - - org.apache.activemq - kahadb - ${activemq.version} - test - org.apache.pig pig diff --git a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java b/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java deleted file mode 100644 index 729a5e7f620..000 --- a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.hive.hcatalog.listener; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import javax.jms.Connection; -import javax.jms.ConnectionFactory; -import javax.jms.Destination; -import javax.jms.JMSException; -import javax.jms.Messa
[hive] branch branch-3 updated (655b88621b5 -> 7b2b35a4ead)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 655b88621b5 HIVE-27261: Backport HIVE-27258 to branch-3 (Vihang Karajgaonkar, reviewed by Junlin Zeng and Aman Raj) add 7b2b35a4ead HIVE-27282 : Backport of HIVE-21717 : Rename is failing for directory in move task (Aman Raj reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../org/apache/hadoop/hive/ql/metadata/Hive.java | 40 -- 1 file changed, 15 insertions(+), 25 deletions(-)
[hive] branch branch-3 updated (cd4e37e29d6 -> 655b88621b5)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from cd4e37e29d6 HIVE-27249: Backport HIVE-23619 to branch-3 (Aman Raj reviewed by Vihang Karajgaonkar) add 655b88621b5 HIVE-27261: Backport HIVE-27258 to branch-3 (Vihang Karajgaonkar, reviewed by Junlin Zeng and Aman Raj) No new revisions were added by this update. Summary of changes: .../java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java| 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-)
[hive] branch branch-3 updated (55bcdb8c210 -> cd4e37e29d6)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 55bcdb8c210 HIVE-27250 : Backport of HIVE-22599 to branch-3 (Aman Raj reviewed by Vihang Karajgaonkar) add cd4e37e29d6 HIVE-27249: Backport HIVE-23619 to branch-3 (Aman Raj reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../java/org/apache/hadoop/hive/conf/HiveConf.java | 5 +- .../TestReExecuteKilledTezAMQueryPlugin.java | 185 + .../org/apache/hadoop/hive/ql/DriverFactory.java | 4 + ...Plugin.java => ReExecuteLostAMQueryPlugin.java} | 62 +++ 4 files changed, 217 insertions(+), 39 deletions(-) create mode 100644 itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/reexec/TestReExecuteKilledTezAMQueryPlugin.java copy ql/src/java/org/apache/hadoop/hive/ql/reexec/{ReExecutionOverlayPlugin.java => ReExecuteLostAMQueryPlugin.java} (66%)
[hive] branch branch-3 updated (acca2b0ffeb -> 55bcdb8c210)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from acca2b0ffeb HIVE-27256 : Backport HIVE-24020 to branch-3 (Aman Raj reviewed by Vihang Karajgaonkar) add 55bcdb8c210 HIVE-27250 : Backport of HIVE-22599 to branch-3 (Aman Raj reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java | 2 -- 1 file changed, 2 deletions(-)
[hive] branch branch-3 updated (8ed723cac8b -> acca2b0ffeb)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 8ed723cac8b HIVE-27220: Backport Upgrade commons,httpclient,jackson,jetty,log4j binaries from branch-3.1 (Naveen Gangam, Apoorva Aggarwal, reviewed by Aman Raj) add acca2b0ffeb HIVE-27256 : Backport HIVE-24020 to branch-3 (Aman Raj reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../hive/ql/txn/compactor/TestCompactor.java | 122 + .../hive/streaming/AbstractRecordWriter.java | 23 ++-- 2 files changed, 131 insertions(+), 14 deletions(-)
[hive] branch master updated (a029c5fd042 -> 4567681988a)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git from a029c5fd042 HIVE-27145: Use StrictMath for remaining Math functions as followup of HIVE-23133 (Himanshu Mishra, reviewed by Krisztian Kasa, Syed Shameerur Rahman) add 4567681988a HIVE-27258: Remove usage of Splitter.splitToList in HiveMetaStoreClient HTTP client (Vihang Karajgaonkar, reviewed by Junlin Zeng, Zhihua Deng) No new revisions were added by this update. Summary of changes: .../java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java| 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-)
[hive] branch branch-3 updated (dcec0c107de -> 6b7cc14d292)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from dcec0c107de HIVE-27211: Backport HIVE-22453 to branch-3 (Nikhil Gupta, reviewed by Aman Raj, Vihang Karajgaonkar) add 6b7cc14d292 HIVE-27216: Upgrade postgresql to 42.5.1 from 9.x in branch-3 (Aman Raj reviewed by Vihang Karajgaonkar and Stamatis Zampetakis) No new revisions were added by this update. Summary of changes: beeline/pom.xml| 4 ++-- .../org/apache/hive/beeline/TestBeelineArgParsing.java | 14 +- pom.xml| 1 + standalone-metastore/pom.xml | 3 ++- 4 files changed, 14 insertions(+), 8 deletions(-)
[hive] branch branch-3 updated (f4c1302811f -> dcec0c107de)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from f4c1302811f HIVE-27202: Disable flaky test testComplexQuery (Vihang Karajgaonkar, reviewed by Aman Raj) add dcec0c107de HIVE-27211: Backport HIVE-22453 to branch-3 (Nikhil Gupta, reviewed by Aman Raj, Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../org/apache/hadoop/hive/ql/exec/DDLTask.java| 50 +++--- 1 file changed, 26 insertions(+), 24 deletions(-)
[hive] branch branch-3 updated (51c56772640 -> f4c1302811f)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 51c56772640 HIVE-27136: Backport HIVE-27129 to branch-3 (Junlin Zeng reviewed by Vihang Karajgaonkar) add f4c1302811f HIVE-27202: Disable flaky test testComplexQuery (Vihang Karajgaonkar, reviewed by Aman Raj) No new revisions were added by this update. Summary of changes: .../src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java| 2 ++ 1 file changed, 2 insertions(+)
[hive] branch branch-3 updated (97bae130539 -> 51c56772640)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 97bae130539 HIVE-26949: Backport HIVE-26071 to branch-3 (Junlin Zeng reviewed by Vihang Karajgaonkar) add 51c56772640 HIVE-27136: Backport HIVE-27129 to branch-3 (Junlin Zeng reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../hadoop/hive/metastore/HiveMetaStoreClient.java | 110 ++--- .../hadoop/hive/metastore/conf/MetastoreConf.java | 3 + .../metastore/TestHiveMetastoreHttpHeaders.java| 108 3 files changed, 184 insertions(+), 37 deletions(-) create mode 100644 standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetastoreHttpHeaders.java
[hive] branch branch-3 updated (8caadd75ea0 -> 97bae130539)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 8caadd75ea0 HIVE-27033 : Backport of HIVE-23044 (Aman Raj, reviewed by Vihang Karajgaonkar) add 97bae130539 HIVE-26949: Backport HIVE-26071 to branch-3 (Junlin Zeng reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: standalone-metastore/pom.xml | 19 ++ .../hadoop/hive/metastore/HiveMetaStore.java | 2 +- .../hadoop/hive/metastore/HiveMetaStoreClient.java | 60 +++-- .../hive/metastore/HmsThriftHttpServlet.java | 156 +++ .../auth/HttpAuthenticationException.java | 6 +- .../hive/metastore/auth/jwt/JWTValidator.java | 109 .../metastore/auth/jwt/URLBasedJWKSProvider.java | 88 +++ .../hadoop/hive/metastore/conf/MetastoreConf.java | 12 +- .../TestRemoteHiveMetastoreWithHttpJwt.java| 284 + .../resources/auth/jwt/jwt-authorized-key.json | 12 + .../resources/auth/jwt/jwt-unauthorized-key.json | 12 + .../resources/auth/jwt/jwt-verification-jwks.json | 20 ++ 12 files changed, 708 insertions(+), 72 deletions(-) copy {service/src/java/org/apache/hive/service => standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore}/auth/HttpAuthenticationException.java (88%) create mode 100644 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/auth/jwt/JWTValidator.java create mode 100644 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/auth/jwt/URLBasedJWKSProvider.java create mode 100644 standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetastoreWithHttpJwt.java create mode 100644 standalone-metastore/src/test/resources/auth/jwt/jwt-authorized-key.json create mode 100644 standalone-metastore/src/test/resources/auth/jwt/jwt-unauthorized-key.json create mode 100644 standalone-metastore/src/test/resources/auth/jwt/jwt-verification-jwks.json
[hive] branch branch-3 updated (d4eaef83eb6 -> 8caadd75ea0)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from d4eaef83eb6 HIVE-21755: Backport HIVE-21462 to branch-3 (Aman Raj, reviewed by Vihang Karajgaonkar) add 8caadd75ea0 HIVE-27033 : Backport of HIVE-23044 (Aman Raj, reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java| 6 +++--- .../test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-)
[hive] branch branch-3 updated (167806919e2 -> d4eaef83eb6)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 167806919e2 HIVE-26948: Backport HIVE-21456 to branch-3 (Vihang Karajgaonkar reviewed by Sourabh Goyal, Aman Raj) add d4eaef83eb6 HIVE-21755: Backport HIVE-21462 to branch-3 (Aman Raj, reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../sql/mssql/upgrade-2.1.0-to-2.2.0.mssql.sql | 47 -- .../sql/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql | 24 ++- .../sql/mssql/upgrade-3.0.0-to-3.1.0.mssql.sql | 30 +- 3 files changed, 78 insertions(+), 23 deletions(-)
[hive] branch branch-3 updated (95b083a77c5 -> 167806919e2)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 95b083a77c5 HIVE-27174 : Disable sysdb.q (Aman Raj reviewed by Vihang Karajgaonkar) add 167806919e2 HIVE-26948: Backport HIVE-21456 to branch-3 (Vihang Karajgaonkar reviewed by Sourabh Goyal, Aman Raj) No new revisions were added by this update. Summary of changes: .../java/org/hadoop/hive/jdbc/SSLTestUtils.java| 7 + .../test/java/org/apache/hive/jdbc/TestSSL.java| 57 + pom.xml| 4 +- standalone-metastore/pom.xml | 6 + .../hadoop/hive/metastore/HiveMetaStore.java | 255 +++-- .../hadoop/hive/metastore/HiveMetaStoreClient.java | 250 +--- .../hive/metastore/HmsThriftHttpServlet.java | 113 + .../hadoop/hive/metastore/conf/MetastoreConf.java | 75 ++ .../hive/metastore/utils/MetaStoreUtils.java | 24 ++ .../hadoop/hive/metastore/utils/SecurityUtils.java | 53 - ...erver.java => TestRemoteHiveHttpMetaStore.java} | 24 +- .../hive/metastore/TestRemoteHiveMetaStore.java| 3 + .../src/test/resources/log4j2.properties | 2 +- 13 files changed, 763 insertions(+), 110 deletions(-) create mode 100644 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HmsThriftHttpServlet.java copy standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/{TestSetUGIOnOnlyServer.java => TestRemoteHiveHttpMetaStore.java} (63%)
[hive] branch branch-3 updated (1e8274fb891 -> 95b083a77c5)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 1e8274fb891 HIVE-26905: Backport HIVE-25173 to 3.2.0: Exclude pentaho-aggdesigner-algorithm from upgrade-acid (Chris Nauroth reviewed by Stamatis Zampetakis) add 95b083a77c5 HIVE-27174 : Disable sysdb.q (Aman Raj reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java | 1 + 1 file changed, 1 insertion(+)
[hive] branch branch-3 updated (f8fc208b4c2 -> 0fcb9b9a497)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from f8fc208b4c2 HIVE-27154: Backport HIVE-20953 to branch-3 (Vihang Karajgaonkar reviewed by Aman Raj) add 0fcb9b9a497 HIVE-20897 : TestJdbcDriver2#testSelectExecAsync2 fails with result set not present error (Mahesh Kumar Behera reviewed by Daniel Dai, Sankar Hariappan (#4153) No new revisions were added by this update. Summary of changes: .../src/java/org/apache/hive/service/cli/OperationStatus.java | 11 --- .../java/org/apache/hive/service/cli/operation/Operation.java | 4 ++-- .../org/apache/hive/service/cli/thrift/ThriftCLIService.java | 4 +++- .../hive/service/cli/thrift/ThriftCLIServiceClient.java | 2 +- 4 files changed, 14 insertions(+), 7 deletions(-)
[hive] branch branch-3 updated (e3965372343 -> f8fc208b4c2)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from e3965372343 HIVE-27171: Backport HIVE-20680 to branch-3 (Vihang Karajgaonkar reviewed by Aman Raj) add f8fc208b4c2 HIVE-27154: Backport HIVE-20953 to branch-3 (Vihang Karajgaonkar reviewed by Aman Raj) No new revisions were added by this update. Summary of changes: .../TestReplicationScenariosAcrossInstances.java | 102 +++-- .../apache/hadoop/hive/ql/exec/FunctionTask.java | 11 ++- 2 files changed, 103 insertions(+), 10 deletions(-)
[hive] branch branch-3 updated (79a6944bec7 -> e3965372343)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 79a6944bec7 HIVE-27152: Revert "HIVE-21388: Constant UDF is not pushed to JDBCStorage Handler" (Aman Raj, reviewed by Vihang Karajgaonkar) add e3965372343 HIVE-27171: Backport HIVE-20680 to branch-3 (Vihang Karajgaonkar reviewed by Aman Raj) No new revisions were added by this update. Summary of changes: .../java/org/apache/hadoop/hive/conf/HiveConf.java | 3 ++ .../TestReplicationScenariosAcrossInstances.java | 51 ++ .../hadoop/hive/ql/exec/repl/ReplLoadTask.java | 9 .../events/filesystem/DatabaseEventsIterator.java | 49 - .../metastore/InjectableBehaviourObjectStore.java | 24 ++ 5 files changed, 135 insertions(+), 1 deletion(-)
[hive] branch branch-3 updated (1a704b422cd -> 79a6944bec7)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 1a704b422cd HIVE-27151: Revert "HIVE-21685: Wrong simplification in query with multiple IN clauses" (Aman Raj, reviewed by Vihang Karajgaonkar) add 79a6944bec7 HIVE-27152: Revert "HIVE-21388: Constant UDF is not pushed to JDBCStorage Handler" (Aman Raj, reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../hive/ql/optimizer/calcite/HiveCalciteUtil.java | 6 +-- .../calcite/reloperators/HiveSqlFunction.java | 58 -- .../calcite/translator/SqlFunctionConverter.java | 37 +++--- .../clientpositive/current_date_timestamp.q| 2 - .../llap/current_date_timestamp.q.out | 14 +- 5 files changed, 33 insertions(+), 84 deletions(-) delete mode 100644 ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveSqlFunction.java
[hive] branch branch-3 updated (2b7b28bf70e -> 1a704b422cd)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 2b7b28bf70e Disable TestJdbcGenericUDTFGetSplits (Vihang Karajgaonkar, reviewed by Aman Raj) add 1a704b422cd HIVE-27151: Revert "HIVE-21685: Wrong simplification in query with multiple IN clauses" (Aman Raj, reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../test/resources/testconfiguration.properties| 1 - .../rules/HivePointLookupOptimizerRule.java| 6 .../test/queries/clientpositive/multi_in_clause.q | 7 .../clientpositive/llap/multi_in_clause.q.out | 40 -- 4 files changed, 54 deletions(-) delete mode 100644 ql/src/test/queries/clientpositive/multi_in_clause.q delete mode 100644 ql/src/test/results/clientpositive/llap/multi_in_clause.q.out
[hive] branch branch-3 updated (96acf4a0ab9 -> 2b7b28bf70e)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 96acf4a0ab9 Revert "HIVE-20182: Backport HIVE-20067 to branch-3 (Daniel Voros via Zoltan Haindrich)" (Aman Raj reviewed by Vihang Karajgaonkar) ) add 2b7b28bf70e Disable TestJdbcGenericUDTFGetSplits (Vihang Karajgaonkar, reviewed by Aman Raj) No new revisions were added by this update. Summary of changes: .../test/java/org/apache/hive/jdbc/TestJdbcGenericUDTFGetSplits.java| 2 ++ 1 file changed, 2 insertions(+)
[hive] branch branch-3 updated (0e0adeee35d -> 96acf4a0ab9)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 0e0adeee35d HIVE-27087: Fix TestMiniSparkOnYarnCliDriver test failures on branch-3 (Vihang Karajgaonkar, reviewed by Sankar Hariappan and Aman Raj) add 96acf4a0ab9 Revert "HIVE-20182: Backport HIVE-20067 to branch-3 (Daniel Voros via Zoltan Haindrich)" (Aman Raj reviewed by Vihang Karajgaonkar) ) No new revisions were added by this update. Summary of changes: ql/src/test/queries/clientpositive/mm_all.q | 1 - .../org/apache/hadoop/hive/metastore/events/InsertEvent.java | 9 - 2 files changed, 4 insertions(+), 6 deletions(-)
[hive] branch branch-3 updated (f1db3f25a7d -> 0e0adeee35d)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from f1db3f25a7d HIVE-27096: Update dbtxnmgr_showlocks.q.out file (Aman Raj reviewed by Stamatis Zampetakis) add 0e0adeee35d HIVE-27087: Fix TestMiniSparkOnYarnCliDriver test failures on branch-3 (Vihang Karajgaonkar, reviewed by Sankar Hariappan and Aman Raj) No new revisions were added by this update. Summary of changes: itests/qtest-spark/pom.xml | 10 +--- .../apache/hadoop/hive/cli/control/CliConfigs.java | 13 + pom.xml| 2 +- .../hadoop/hive/metastore/conf/MetastoreConf.java | 9 +-- .../hadoop/hive/metastore/metrics/Metrics.java | 30 +++--- .../hadoop/hive/metastore/metrics/TestMetrics.java | 44 ++- .../metastore/testutils/CapturingLogAppender.java | 65 -- .../src/test/resources/log4j2.properties | 8 +-- 8 files changed, 31 insertions(+), 150 deletions(-) delete mode 100644 standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/testutils/CapturingLogAppender.java
[hive] branch master updated: HIVE-27129: Add enhanced support for Hive Metastore Client http support (Junlin Zeng, reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 63a02465957 HIVE-27129: Add enhanced support for Hive Metastore Client http support (Junlin Zeng, reviewed by Vihang Karajgaonkar) 63a02465957 is described below commit 63a02465957abe5311d287e79cd9cb949170dedf Author: Junlin Zeng <69210845+junlinzeng...@users.noreply.github.com> AuthorDate: Fri Mar 10 13:14:43 2023 -0800 HIVE-27129: Add enhanced support for Hive Metastore Client http support (Junlin Zeng, reviewed by Vihang Karajgaonkar) (Closes #4104) --- .../hadoop/hive/metastore/HiveMetaStoreClient.java | 95 -- .../hadoop/hive/metastore/conf/MetastoreConf.java | 3 + .../metastore/TestHiveMetastoreHttpHeaders.java| 108 + 3 files changed, 176 insertions(+), 30 deletions(-) diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 50412dd17be..e229ab1a0b4 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -46,6 +46,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; +import java.util.Objects; import java.util.Optional; import java.util.Random; import java.util.concurrent.TimeUnit; @@ -54,6 +55,7 @@ import java.util.concurrent.atomic.AtomicInteger; import javax.security.auth.login.LoginException; import com.google.common.base.Preconditions; +import com.google.common.base.Splitter; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.classification.InterfaceAudience; @@ -621,6 
+623,24 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable { return transport; } + private Map getAdditionalHeaders() { +Map headers = new HashMap<>(); +String keyValuePairs = MetastoreConf.getVar(conf, +ConfVars.METASTORE_CLIENT_ADDITIONAL_HEADERS); +try { + List headerKeyValues = + Splitter.on(',').trimResults().splitToList(keyValuePairs); + for (String header : headerKeyValues) { +String[] parts = header.split("="); +headers.put(parts[0].trim(), parts[1].trim()); + } +} catch (Exception ex) { + LOG.warn("Could not parse the headers provided in " + + ConfVars.METASTORE_CLIENT_ADDITIONAL_HEADERS, ex); +} +return headers; + } + /* Creates a THttpClient if HTTP mode is enabled. If Client auth mode is set to JWT, then the method fetches JWT from environment variable: HMS_JWT and sets in auth @@ -629,10 +649,47 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable { private THttpClient createHttpClient(URI store, boolean useSSL) throws MetaException, TTransportException { String path = MetaStoreUtils.getHttpPath(MetastoreConf.getVar(conf, ConfVars.THRIFT_HTTP_PATH)); -String httpUrl = (useSSL ? 
"https://; : "http://;) + store.getHost() + ":" + store.getPort() + path; +String urlScheme; +if (useSSL || Objects.equals(store.getScheme(), "https")) { + urlScheme = "https://;; +} else { + urlScheme = "http://;; +} +String httpUrl = urlScheme + store.getHost() + ":" + store.getPort() + path; + +HttpClientBuilder httpClientBuilder = createHttpClientBuilder(); +THttpClient tHttpClient; +try { + if (useSSL) { +String trustStorePath = MetastoreConf.getVar(conf, ConfVars.SSL_TRUSTSTORE_PATH).trim(); +if (trustStorePath.isEmpty()) { + throw new IllegalArgumentException(ConfVars.SSL_TRUSTSTORE_PATH + " Not configured for SSL connection"); +} +String trustStorePassword = MetastoreConf.getPassword(conf, MetastoreConf.ConfVars.SSL_TRUSTSTORE_PASSWORD); +String trustStoreType = MetastoreConf.getVar(conf, ConfVars.SSL_TRUSTSTORE_TYPE).trim(); +String trustStoreAlgorithm = MetastoreConf.getVar(conf, ConfVars.SSL_TRUSTMANAGERFACTORY_ALGORITHM).trim(); +tHttpClient = +SecurityUtils.getThriftHttpsClient(httpUrl, trustStorePath, trustStorePassword, trustStoreAlgorithm, +trustStoreType, httpClientBuilder); + } else { +tHttpClient = new THttpClient(httpUrl, httpClientBuilder.build()); + } +} catch (Exception e) { + if (e instanceof TTransportException) { +throw (TTransportException) e; + } else { +throw new
[hive] branch branch-3 updated (f205a94c39b -> 5afa24ac144)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from f205a94c39b HIVE-27090 : Test fix for external_jdbc_auth.q (Aman Raj reviewed by Vihang Karajgaonkar) add 5afa24ac144 HIVE-27062: Disable flaky test TestRpc (Vihang Karajgaonkar, reviewed by Stamatis Zampetakis) No new revisions were added by this update. Summary of changes: spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java | 1 + 1 file changed, 1 insertion(+)
[hive] branch branch-3 updated (422fd4e48bd -> f205a94c39b)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git from 422fd4e48bd HIVE-27017: backport HIVE-26771 option to use createTable DDLTask in CTAS for StorgeHandler (#4014) add f205a94c39b HIVE-27090 : Test fix for external_jdbc_auth.q (Aman Raj reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-)
[hive] branch master updated: HIVE-25796: Allow metastore clients to fetch remaining events if some of the events are cleaned up (Vihang Karajgaonkar, reviewed by Sourabh Goyal)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new d034138 HIVE-25796: Allow metastore clients to fetch remaining events if some of the events are cleaned up (Vihang Karajgaonkar, reviewed by Sourabh Goyal) d034138 is described below commit d034138b976c48930e5973d179c94ea1c2a6130f Author: Vihang Karajgaonkar <27933586+vihan...@users.noreply.github.com> AuthorDate: Fri Dec 17 10:46:00 2021 -0800 HIVE-25796: Allow metastore clients to fetch remaining events if some of the events are cleaned up (Vihang Karajgaonkar, reviewed by Sourabh Goyal) --- .../listener/TestDbNotificationListener.java | 38 ++ .../hadoop/hive/metastore/HiveMetaStoreClient.java | 17 +- .../hadoop/hive/metastore/IMetaStoreClient.java| 21 .../metastore/HiveMetaStoreClientPreCatalog.java | 19 +-- 4 files changed, 92 insertions(+), 3 deletions(-) diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 7973f0a..100ee24 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -19,6 +19,7 @@ package org.apache.hive.hcatalog.listener; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertFalse; @@ -42,6 +43,7 @@ import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import 
org.apache.hadoop.hive.metastore.IMetaStoreClient.NotificationFilter; import org.apache.hadoop.hive.metastore.MetaStoreEventListener; import org.apache.hadoop.hive.metastore.MetaStoreEventListenerConstants; import org.apache.hadoop.hive.metastore.TableType; @@ -55,6 +57,7 @@ import org.apache.hadoop.hive.metastore.api.FunctionType; import org.apache.hadoop.hive.metastore.api.InsertEventRequestData; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NotificationEvent; +import org.apache.hadoop.hive.metastore.api.NotificationEventRequest; import org.apache.hadoop.hive.metastore.api.NotificationEventResponse; import org.apache.hadoop.hive.metastore.api.NotificationEventsCountRequest; import org.apache.hadoop.hive.metastore.api.Partition; @@ -1681,6 +1684,41 @@ public class TestDbNotificationListener { assertEquals(0, rsp2.getEventsSize()); } + /** + * Test makes sure that if you use the API {@link HiveMetaStoreClient#getNextNotification(NotificationEventRequest, boolean, NotificationFilter)} + * does not error out if the events are cleanedup. + */ + @Test + public void skipCleanedUpEvents() throws Exception { +Database db = new Database("cleanup1", "no description", testTempDir, emptyParameters); +msClient.createDatabase(db); +msClient.dropDatabase("cleanup1"); + +// sleep for expiry time, and then fetch again +// sleep twice the TTL interval - things should have been cleaned by then. +Thread.sleep(EVENTS_TTL * 2 * 1000); + +db = new Database("cleanup2", "no description", testTempDir, emptyParameters); +msClient.createDatabase(db); +msClient.dropDatabase("cleanup2"); + +// the firstEventId is before the cleanup happened, so we should just receive the +// events which remaining after cleanup. 
+NotificationEventRequest request = new NotificationEventRequest(); +request.setLastEvent(firstEventId); +request.setMaxEvents(-1); +NotificationEventResponse rsp2 = msClient.getNextNotification(request, true, null); +assertEquals(2, rsp2.getEventsSize()); +// when we pass the allowGapsInEvents as false the API should error out +Exception ex = null; +try { + NotificationEventResponse rsp = msClient.getNextNotification(request, false, null); +} catch (Exception e) { + ex = e; +} +assertNotNull(ex); + } + @Test public void cleanupNotificationWithError() throws Exception { Database db = new Database("cleanup1", "no description", testTempDir, emptyParameters); diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClie
[hive] branch branch-3.1 updated: Hive-24741 backport to 3.1 (Nilesh Salian, reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3.1 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3.1 by this push: new dd7ffab Hive-24741 backport to 3.1 (Nilesh Salian, reviewed by Vihang Karajgaonkar) dd7ffab is described below commit dd7ffab75f550446e5d2d49ce91e03e428a16c3a Author: Neelesh Srinivas Salian AuthorDate: Fri Oct 29 10:21:30 2021 -0700 Hive-24741 backport to 3.1 (Nilesh Salian, reviewed by Vihang Karajgaonkar) --- .../apache/hadoop/hive/ql/metadata/TestHive.java | 57 + .../apache/hadoop/hive/metastore/ObjectStore.java | 59 +- 2 files changed, 104 insertions(+), 12 deletions(-) diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index a24b642..81418de 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -677,6 +677,63 @@ public class TestHive extends TestCase { System.err.println(StringUtils.stringifyException(e)); assertTrue("Unable to create parition for table: " + tableName, false); } + + part_spec.clear(); + part_spec.put("ds", "2008-04-08"); + part_spec.put("hr", "13"); + try { +hm.createPartition(tbl, part_spec); + } catch (HiveException e) { +System.err.println(StringUtils.stringifyException(e)); +assertTrue("Unable to create parition for table: " + tableName, false); + } + part_spec.clear(); + part_spec.put("ds", "2008-04-08"); + part_spec.put("hr", "14"); + try { +hm.createPartition(tbl, part_spec); + } catch (HiveException e) { +System.err.println(StringUtils.stringifyException(e)); +assertTrue("Unable to create parition for table: " + tableName, false); + } + part_spec.clear(); + part_spec.put("ds", "2008-04-07"); + part_spec.put("hr", "12"); + try { +hm.createPartition(tbl, part_spec); + } catch (HiveException e) { 
+System.err.println(StringUtils.stringifyException(e)); +assertTrue("Unable to create parition for table: " + tableName, false); + } + part_spec.clear(); + part_spec.put("ds", "2008-04-07"); + part_spec.put("hr", "13"); + try { +hm.createPartition(tbl, part_spec); + } catch (HiveException e) { +System.err.println(StringUtils.stringifyException(e)); +assertTrue("Unable to create parition for table: " + tableName, false); + } + + Map partialSpec = new HashMap<>(); + partialSpec.put("ds", "2008-04-07"); + assertEquals(2, hm.getPartitions(tbl, partialSpec).size()); + + partialSpec = new HashMap<>(); + partialSpec.put("ds", "2008-04-08"); + assertEquals(3, hm.getPartitions(tbl, partialSpec).size()); + + partialSpec = new HashMap<>(); + partialSpec.put("hr", "13"); + assertEquals(2, hm.getPartitions(tbl, partialSpec).size()); + + partialSpec = new HashMap<>(); + assertEquals(5, hm.getPartitions(tbl, partialSpec).size()); + + partialSpec = new HashMap<>(); + partialSpec.put("hr", "14"); + assertEquals(1, hm.getPartitions(tbl, partialSpec).size()); + hm.dropTable(Warehouse.DEFAULT_DATABASE_NAME, tableName); } catch (Throwable e) { System.err.println(StringUtils.stringifyException(e)); diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java index 6bdae6c..f32e497 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -231,6 +231,7 @@ import org.slf4j.LoggerFactory; import com.codahale.metrics.Counter; import com.codahale.metrics.MetricRegistry; import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; @@ -3177,6 +3178,27 @@ public class ObjectStore 
implements RawStore, Configurable { return (Collection) query.executeWithArray(dbName, catName, tableName, partNameMatcher); } + /** + * If partVals all the values are empty strings, it means we are returning + * all the partitions and hence we can attempt to use a directSQL equival
[hive] branch master updated: HIVE-25479: Browser SSO auth may fail intermittently (Vihang Karajgaonkar, reviewed by Naveen Gangam)"
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 3e25b52 HIVE-25479: Browser SSO auth may fail intermittently (Vihang Karajgaonkar, reviewed by Naveen Gangam)" 3e25b52 is described below commit 3e25b524600c62dfde8d069288a162ee6a59d25d Author: Vihang Karajgaonkar <27933586+vihan...@users.noreply.github.com> AuthorDate: Tue Oct 19 15:57:58 2021 -0700 HIVE-25479: Browser SSO auth may fail intermittently (Vihang Karajgaonkar, reviewed by Naveen Gangam)" Closes (#2728) --- .../jdbc/saml/SimpleSAMLPhpTestBrowserClient.java | 15 ++ .../auth/saml/TestHttpSamlAuthentication.java | 105 - .../java/org/apache/hive/jdbc/HiveConnection.java | 74 ++--- jdbc/src/java/org/apache/hive/jdbc/Utils.java | 2 +- .../hive/jdbc/saml/HiveJdbcBrowserClient.java | 169 ++--- .../hive/jdbc/saml/HttpBrowserClientServlet.java | 62 .../jdbc/saml/HttpSamlAuthRequestInterceptor.java | 5 + .../apache/hive/jdbc/saml/IJdbcBrowserClient.java | 42 +++-- .../service/auth/saml/HiveSamlHttpServlet.java | 14 +- 9 files changed, 345 insertions(+), 143 deletions(-) diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/saml/SimpleSAMLPhpTestBrowserClient.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/saml/SimpleSAMLPhpTestBrowserClient.java index 0e2a8b2..97f5de5 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/saml/SimpleSAMLPhpTestBrowserClient.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/saml/SimpleSAMLPhpTestBrowserClient.java @@ -36,6 +36,7 @@ public class SimpleSAMLPhpTestBrowserClient extends HiveJdbcBrowserClient { private final String username; private final String password; private final long tokenDelayMs; + private int injectFailureCount = 0; private static final Logger LOG = LoggerFactory .getLogger(SimpleSAMLPhpTestBrowserClient.class); @@ -48,6 +49,10 
@@ public class SimpleSAMLPhpTestBrowserClient extends HiveJdbcBrowserClient { this.tokenDelayMs = tokenDelayMs; } + public void setInjectFailureCount(int injectFailureCount) { +this.injectFailureCount = injectFailureCount; + } + @Override protected void openBrowserWindow() throws HiveJdbcBrowserException { // if user and password are null, we fallback to real browser for interactive mode @@ -73,6 +78,16 @@ public class SimpleSAMLPhpTestBrowserClient extends HiveJdbcBrowserClient { } @Override + public void doBrowserSSO() throws HiveJdbcBrowserException { +if (injectFailureCount > 0) { + injectFailureCount--; + throw new HiveJdbcBrowserException( + "This is a injected failure for testing purpose"); +} +super.doBrowserSSO(); + } + + @Override public HiveJdbcBrowserServerResponse getServerResponse() { if (tokenDelayMs > 0) { LOG.debug("Adding a delay of {} msec", tokenDelayMs); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java index 2a69ed7..d71fded 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java @@ -21,6 +21,7 @@ package org.apache.hive.service.auth.saml; import static org.apache.hive.jdbc.Utils.JdbcConnectionParams.AUTH_BROWSER_RESPONSE_PORT; import static org.apache.hive.jdbc.Utils.JdbcConnectionParams.AUTH_BROWSER_RESPONSE_TIMEOUT_SECS; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -31,6 +32,10 @@ import com.google.common.io.Files; import com.google.common.io.Resources; import java.io.File; import java.io.IOException; +import java.net.InetAddress; +import 
java.net.InetSocketAddress; +import java.net.ServerSocket; +import java.net.Socket; import java.nio.charset.StandardCharsets; import java.sql.ResultSet; import java.sql.SQLException; @@ -48,6 +53,7 @@ import java.util.concurrent.Future; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hadoop.hive.ql.metadata.TestHive; import org.apache.hive.jdbc.HiveConnection; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.jdbc.miniHS2.MiniHS2; @@ -56,6 +62,7 @@ import org.apache.hive.jdbc.saml.IJdbcBrowserClient; import org.apache.hive.jdbc.saml.IJdbcBrows
[hive] branch branch-2.3 updated: HIVE-25616: HIVE-24741 backport to 2.3 (Neelesh Srinivas Salian reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-2.3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-2.3 by this push: new 8e7f23f HIVE-25616: Hive-24741 backport to 2.3 (Neelesh Srinivas Salian reviewed by Vihang Karajgaonkar) 8e7f23f is described below commit 8e7f23f34b2ce7328c9d571a13c336f0c8cdecb6 Author: Neelesh Srinivas Salian AuthorDate: Tue Oct 19 12:50:31 2021 -0700 HIVE-25616: Hive-24741 backport to 2.3 (Neelesh Srinivas Salian reviewed by Vihang Karajgaonkar) Closes (#2730) --- .../apache/hadoop/hive/metastore/ObjectStore.java | 62 +- .../apache/hadoop/hive/ql/metadata/TestHive.java | 56 +++ 2 files changed, 104 insertions(+), 14 deletions(-) diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java index f7248b1..bb69d07 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -169,6 +169,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Joiner; import com.google.common.collect.Lists; import com.google.common.collect.Maps; @@ -2565,34 +2566,67 @@ public class ObjectStore implements RawStore, Configurable { return (Collection) query.execute(dbName, tableName, partNameMatcher); } + /** + * If partVals all the values are empty strings, it means we are returning + * all the partitions and hence we can attempt to use a directSQL equivalent API which + * is considerably faster. + * @param partVals The partitions values used to filter out the partitions. + * @return true only when partVals is non-empty and contains only empty strings, + * otherwise false. 
If user or groups is valid then returns false since the directSQL + * doesn't support partition privileges. + */ + private boolean canTryDirectSQL(List partVals) { +if (partVals.isEmpty()) { + return false; +} +for (String val : partVals) { + if (val != null && !val.isEmpty()) { +return false; + } +} +return true; + } + @Override public List listPartitionsPsWithAuth(String db_name, String tbl_name, List part_vals, short max_parts, String userName, List groupNames) throws MetaException, InvalidObjectException, NoSuchObjectException { -List partitions = new ArrayList(); +List partitions = new ArrayList<>(); boolean success = false; QueryWrapper queryWrapper = new QueryWrapper(); - try { openTransaction(); - LOG.debug("executing listPartitionNamesPsWithAuth"); - Collection parts = getPartitionPsQueryResults(db_name, tbl_name, - part_vals, max_parts, null, queryWrapper); + MTable mtbl = getMTable(db_name, tbl_name); + if (mtbl == null) { +throw new NoSuchObjectException(db_name + "." + tbl_name + " table not found"); + } + boolean getauth = null != userName && null != groupNames && + "TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE")); + if(!getauth && canTryDirectSQL(part_vals)) { +LOG.debug( +"Redirecting to directSQL enabled API: db: {} tbl: {} partVals: {}", +db_name, tbl_name, Joiner.on(',').join(part_vals)); +return getPartitions(db_name, tbl_name, -1); + } + LOG.debug("executing listPartitionNamesPsWithAuth"); + Collection parts = getPartitionPsQueryResults(db_name, tbl_name, part_vals, + max_parts, null, queryWrapper); for (Object o : parts) { Partition part = convertToPart((MPartition) o); -//set auth privileges -if (null != userName && null != groupNames && - "TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) { - String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl - .getPartitionKeys()), part.getValues()); - PrincipalPrivilegeSet partAuth = getPartitionPrivilegeSet(db_name, - 
tbl_name, partName, userName, groupNames); - part.setPrivileges(partAuth); -} +// set auth privileges +String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl +.getPartitionKeys()), part.getValues()); +PrincipalPrivilegeSet partAuth = getPartitionPrivilegeSet(db_name, +tbl_name, partName, userName, groupNames); +part.setPrivileges(partAuth); partitions.add(part); } success = commitTransaction();
[hive] branch revert-2727-revert-2601-master_sso created (now 47fc141)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch revert-2727-revert-2601-master_sso in repository https://gitbox.apache.org/repos/asf/hive.git. at 47fc141 Revert "Revert "HIVE-25479: Browser SSO auth may fail intermittently (#2601)" (#2727)" No new revisions were added by this update.
[hive] branch master updated (08af0fc -> 3e66a91)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 08af0fc HIVE-20303: INSERT OVERWRITE TABLE db.table PARTITION (...) IF NOT EXISTS throws InvalidTableException (Stamatis Zampetakis, reviewed by Alessandro Solimando, Krisztian Kasa) add 3e66a91 Revert "HIVE-25479: Browser SSO auth may fail intermittently (#2601)" (#2727) No new revisions were added by this update. Summary of changes: .../jdbc/saml/SimpleSAMLPhpTestBrowserClient.java | 15 -- .../auth/saml/TestHttpSamlAuthentication.java | 105 + .../java/org/apache/hive/jdbc/HiveConnection.java | 74 +++-- jdbc/src/java/org/apache/hive/jdbc/Utils.java | 2 +- .../hive/jdbc/saml/HiveJdbcBrowserClient.java | 169 +++-- .../hive/jdbc/saml/HttpBrowserClientServlet.java | 62 .../jdbc/saml/HttpSamlAuthRequestInterceptor.java | 5 - .../apache/hive/jdbc/saml/IJdbcBrowserClient.java | 42 ++--- .../service/auth/saml/HiveSamlHttpServlet.java | 14 +- 9 files changed, 143 insertions(+), 345 deletions(-) delete mode 100644 jdbc/src/java/org/apache/hive/jdbc/saml/HttpBrowserClientServlet.java
[hive] branch revert-2601-master_sso created (now c248bed)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch revert-2601-master_sso in repository https://gitbox.apache.org/repos/asf/hive.git. at c248bed Revert "HIVE-25479: Browser SSO auth may fail intermittently (#2601)" No new revisions were added by this update.
[hive] branch master updated: HIVE-25479: Browser SSO auth may fail intermittently (#2601)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new d9a52ce HIVE-25479: Browser SSO auth may fail intermittently (#2601) d9a52ce is described below commit d9a52ce05eafda8c1d1209d667a3bc0dd365620a Author: Vihang Karajgaonkar <27933586+vihan...@users.noreply.github.com> AuthorDate: Sat Oct 9 09:12:19 2021 -0700 HIVE-25479: Browser SSO auth may fail intermittently (#2601) --- .../jdbc/saml/SimpleSAMLPhpTestBrowserClient.java | 15 ++ .../auth/saml/TestHttpSamlAuthentication.java | 105 - .../java/org/apache/hive/jdbc/HiveConnection.java | 74 ++--- jdbc/src/java/org/apache/hive/jdbc/Utils.java | 2 +- .../hive/jdbc/saml/HiveJdbcBrowserClient.java | 169 ++--- .../hive/jdbc/saml/HttpBrowserClientServlet.java | 62 .../jdbc/saml/HttpSamlAuthRequestInterceptor.java | 5 + .../apache/hive/jdbc/saml/IJdbcBrowserClient.java | 42 +++-- .../service/auth/saml/HiveSamlHttpServlet.java | 14 +- 9 files changed, 345 insertions(+), 143 deletions(-) diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/saml/SimpleSAMLPhpTestBrowserClient.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/saml/SimpleSAMLPhpTestBrowserClient.java index 0e2a8b2..97f5de5 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/saml/SimpleSAMLPhpTestBrowserClient.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/saml/SimpleSAMLPhpTestBrowserClient.java @@ -36,6 +36,7 @@ public class SimpleSAMLPhpTestBrowserClient extends HiveJdbcBrowserClient { private final String username; private final String password; private final long tokenDelayMs; + private int injectFailureCount = 0; private static final Logger LOG = LoggerFactory .getLogger(SimpleSAMLPhpTestBrowserClient.class); @@ -48,6 +49,10 @@ public class SimpleSAMLPhpTestBrowserClient extends HiveJdbcBrowserClient { this.tokenDelayMs = 
tokenDelayMs; } + public void setInjectFailureCount(int injectFailureCount) { +this.injectFailureCount = injectFailureCount; + } + @Override protected void openBrowserWindow() throws HiveJdbcBrowserException { // if user and password are null, we fallback to real browser for interactive mode @@ -73,6 +78,16 @@ public class SimpleSAMLPhpTestBrowserClient extends HiveJdbcBrowserClient { } @Override + public void doBrowserSSO() throws HiveJdbcBrowserException { +if (injectFailureCount > 0) { + injectFailureCount--; + throw new HiveJdbcBrowserException( + "This is a injected failure for testing purpose"); +} +super.doBrowserSSO(); + } + + @Override public HiveJdbcBrowserServerResponse getServerResponse() { if (tokenDelayMs > 0) { LOG.debug("Adding a delay of {} msec", tokenDelayMs); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java index 2a69ed7..d71fded 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java @@ -21,6 +21,7 @@ package org.apache.hive.service.auth.saml; import static org.apache.hive.jdbc.Utils.JdbcConnectionParams.AUTH_BROWSER_RESPONSE_PORT; import static org.apache.hive.jdbc.Utils.JdbcConnectionParams.AUTH_BROWSER_RESPONSE_TIMEOUT_SECS; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -31,6 +32,10 @@ import com.google.common.io.Files; import com.google.common.io.Resources; import java.io.File; import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.ServerSocket; +import java.net.Socket; import 
java.nio.charset.StandardCharsets; import java.sql.ResultSet; import java.sql.SQLException; @@ -48,6 +53,7 @@ import java.util.concurrent.Future; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hadoop.hive.ql.metadata.TestHive; import org.apache.hive.jdbc.HiveConnection; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.jdbc.miniHS2.MiniHS2; @@ -56,6 +62,7 @@ import org.apache.hive.jdbc.saml.IJdbcBrowserClient; import org.apache.hive.jdbc.saml.IJdbcBrowserClient.HiveJdbcBrowserException; import org.apache.hive.jdbc.saml.IJdbcBrowserClientFactory; import org.apache.hive.jdbc.saml.SimpleS
[hive] branch master updated (d323351 -> 17e18c0)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from d323351 disable unstable tests add 17e18c0 HIVE-25055: Improve the exception handling in HMSHandler (Zhihua Deng reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../hadoop/hive/metastore/ExceptionHandler.java| 163 + .../apache/hadoop/hive/metastore/HMSHandler.java | 670 ++--- .../hive/metastore/TestExceptionHandler.java | 124 3 files changed, 468 insertions(+), 489 deletions(-) create mode 100644 standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ExceptionHandler.java create mode 100644 standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestExceptionHandler.java
[hive] branch master updated (7615944 -> d9357d6)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 7615944 HIVE-25183: Inner Join condition parsing error in subquery (Soumyakanti Das, reviewed by Jesus Camacho Rodriguez) add d9357d6 HIVE-24987: Introduce an exception list to skip incompatible column change check (Vihang Karajgaonkar, reviewed by Yongzhi Chen) No new revisions were added by this update. Summary of changes: .../TestDisallowColChangesExceptionList.java | 129 + .../hadoop/hive/metastore/conf/MetastoreConf.java | 5 + .../DefaultIncompatibleTableChangeHandler.java | 116 ++ .../hadoop/hive/metastore/HiveAlterHandler.java| 35 +- ...va => IMetaStoreIncompatibleChangeHandler.java} | 29 ++--- 5 files changed, 269 insertions(+), 45 deletions(-) create mode 100644 itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestDisallowColChangesExceptionList.java create mode 100644 standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/DefaultIncompatibleTableChangeHandler.java copy standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/{DefaultStorageSchemaReader.java => IMetaStoreIncompatibleChangeHandler.java} (58%)
[hive] branch master updated (5854347 -> 639b41a)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 5854347 HIVE-25019: Rename metrics that have spaces in the name (Antal Sinkovits, reviewed by Karen Coppage) add 639b41a HIVE-25005: Provide default implementation for HMS APIs (Kishen Das, reviewed by Naveen Gangam and Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../metastore/AbstractThriftHiveMetastore.java | 1610 1 file changed, 1610 insertions(+) create mode 100644 standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/AbstractThriftHiveMetastore.java
[hive] branch master updated (36176a6 -> 6e8936f)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 36176a6 HIVE-24863: Wrong property value in UDAF percentile_cont/disc description (Krisztian Kasa, reviewed by Zoltan Haindrich) add 6e8936f HIVE-24543: Support SAML 2.0 as an authentication mechanism (Vihang Karajgaonkar, reviewed by Naveen Gangam) No new revisions were added by this update. Summary of changes: accumulo-handler/pom.xml | 4 + .../src/java/org/apache/hive/beeline/Commands.java | 3 +- cli/pom.xml| 6 + .../java/org/apache/hadoop/hive/conf/HiveConf.java | 70 ++- itests/hive-unit/pom.xml | 14 + .../jdbc/saml/SimpleSAMLPhpTestBrowserClient.java | 87 .../auth/saml/TestHttpSamlAuthentication.java | 471 + .../simple-saml-idp-metadata-template.xml | 22 + .../java/org/apache/hive/jdbc/miniHS2/MiniHS2.java | 22 + jdbc/pom.xml | 4 + .../java/org/apache/hive/jdbc/HiveConnection.java | 213 -- jdbc/src/java/org/apache/hive/jdbc/Utils.java | 14 + .../hive/jdbc/saml/HiveJdbcBrowserClient.java | 317 ++ .../jdbc/saml/HiveJdbcBrowserClientFactory.java| 57 +++ .../jdbc/saml/HiveJdbcSamlRedirectStrategy.java| 65 +++ .../jdbc/saml/HttpSamlAuthRequestInterceptor.java | 62 +++ .../apache/hive/jdbc/saml/IJdbcBrowserClient.java | 152 +++ .../hive/jdbc/saml/IJdbcBrowserClientFactory.java | 37 +- pom.xml| 91 service/pom.xml| 12 + .../auth/AuthenticationProviderFactory.java| 3 +- .../hive/service/auth/HiveAuthConstants.java | 3 +- .../apache/hive/service/auth/HiveAuthFactory.java | 8 +- .../hive/service/auth/saml/HiveSaml2Client.java| 202 + .../auth/saml/HiveSamlAuthTokenGenerator.java | 165 .../service/auth/saml/HiveSamlGroupNameFilter.java | 83 .../service/auth/saml/HiveSamlHttpServlet.java | 105 + .../HiveSamlRelayStateInfo.java} | 35 +- .../service/auth/saml/HiveSamlRelayStateStore.java | 143 +++ .../hive/service/auth/saml/HiveSamlUtils.java | 92 .../HttpSamlAuthenticationException.java} | 32 +- 
.../HttpSamlNoGroupsMatchedException.java} | 29 +- .../HttpSamlRedirectException.java}| 31 +- .../ISAMLAuthTokenGenerator.java} | 37 +- .../service/cli/thrift/ThriftHttpCLIService.java | 9 +- .../hive/service/cli/thrift/ThriftHttpServlet.java | 104 - .../apache/hive/service/server/HiveServer2.java| 9 + .../service/cli/thrift/ThriftHttpServletTest.java | 4 +- 38 files changed, 2640 insertions(+), 177 deletions(-) create mode 100644 itests/hive-unit/src/test/java/org/apache/hive/jdbc/saml/SimpleSAMLPhpTestBrowserClient.java create mode 100644 itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java create mode 100644 itests/hive-unit/src/test/resources/simple-saml-idp-metadata-template.xml create mode 100644 jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcBrowserClient.java create mode 100644 jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcBrowserClientFactory.java create mode 100644 jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcSamlRedirectStrategy.java create mode 100644 jdbc/src/java/org/apache/hive/jdbc/saml/HttpSamlAuthRequestInterceptor.java create mode 100644 jdbc/src/java/org/apache/hive/jdbc/saml/IJdbcBrowserClient.java copy service/src/java/org/apache/hive/service/auth/HiveAuthConstants.java => jdbc/src/java/org/apache/hive/jdbc/saml/IJdbcBrowserClientFactory.java (52%) create mode 100644 service/src/java/org/apache/hive/service/auth/saml/HiveSaml2Client.java create mode 100644 service/src/java/org/apache/hive/service/auth/saml/HiveSamlAuthTokenGenerator.java create mode 100644 service/src/java/org/apache/hive/service/auth/saml/HiveSamlGroupNameFilter.java create mode 100644 service/src/java/org/apache/hive/service/auth/saml/HiveSamlHttpServlet.java copy service/src/java/org/apache/hive/service/auth/{HiveAuthConstants.java => saml/HiveSamlRelayStateInfo.java} (60%) create mode 100644 service/src/java/org/apache/hive/service/auth/saml/HiveSamlRelayStateStore.java create mode 100644 
service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java copy service/src/java/org/apache/hive/service/auth/{HiveAuthConstants.java => saml/HttpSamlAuthenticationException.java} (60%) copy service/src/java/org/apache/hive/service/auth/{HiveAuthConstants.java => saml/HttpSamlNoGroupsMatchedException.java} (60%) copy service/src/java/org/apache/hive/service/auth/{HiveAuthConstants.java => saml/HttpSamlRedirectException.java} (60%
[hive] branch master updated (dc5bb8b -> 7ec4488)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from dc5bb8b disable tests add 7ec4488 HIVE-24741: get_partitions_ps_with_auth performance improvement (Vihang Karajgaonkar reviewed by Naveen Gangam) No new revisions were added by this update. Summary of changes: .../apache/hadoop/hive/ql/metadata/TestHive.java | 56 ++ .../apache/hadoop/hive/metastore/ObjectStore.java | 40 ++-- 2 files changed, 93 insertions(+), 3 deletions(-)
[hive] branch master updated (9c114b7 -> b0309b7)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 9c114b7 HIVE-24478: Subquery GroupBy with Distinct SemanticException: Invalid column reference (Panos Garefalakis, reviewed by Krisztian Kasa) add b0309b7 HIVE-24470: Separate HiveMetaStore and HMSHandler logic (Zhou Fang reviewed by Miklos Gergely, Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../hcatalog/listener/DbNotificationListener.java | 2 +- .../hive/metastore/TestMetastoreVersion.java | 6 +- .../TestPartitionExpressionProxyDefault.java | 2 +- .../cache/TestCachedStoreUpdateUsingEvents.java| 6 +- .../hive/metastore/security/TestDBTokenStore.java | 2 +- .../ql/TestMetaStoreLimitPartitionRequest.java | 4 +- .../parse/repl/dump/events/CommitTxnHandler.java | 4 +- .../StorageBasedAuthorizationProvider.java | 5 +- .../plugin/metastore/HiveMetaStoreAuthorizer.java | 5 +- .../plugin/metastore/HiveMetaStoreAuthzInfo.java | 4 +- .../plugin/sqlstd/SQLStdHiveAccessController.java | 8 +- .../hadoop/hive/ql/txn/compactor/Cleaner.java | 2 +- .../ql/txn/compactor/MetaStoreCompactorThread.java | 2 +- .../metastore/TestHiveMetaStoreAuthorizer.java | 6 +- .../service/server/ThreadWithGarbageCleanup.java | 4 +- .../apache/hadoop/hive/metastore/HMSHandler.java | 10255 ++ .../hadoop/hive/metastore/HMSMetricsListener.java | 1 - .../hadoop/hive/metastore/HiveAlterHandler.java| 6 +- .../hadoop/hive/metastore/HiveMetaStore.java | 10441 +-- .../hive/metastore/HiveMetaStoreChecker.java | 4 +- .../apache/hadoop/hive/metastore/IHMSHandler.java | 1 - .../apache/hadoop/hive/metastore/ObjectStore.java | 4 +- .../metastore/ReplicationMetricsMaintTask.java | 2 +- .../hive/metastore/RuntimeStatsCleanerTask.java| 2 +- .../ScheduledQueryExecutionsMaintTask.java | 2 +- .../hive/metastore/TSetIpAddressProcessor.java | 1 - .../hive/metastore/events/EventCleanerTask.java| 4 +- 
.../hive/metastore/events/ListenerEvent.java | 6 +- .../hadoop/hive/metastore/IpAddressListener.java | 1 - .../hadoop/hive/metastore/TestAdminUser.java |10 +- .../hadoop/hive/metastore/TestHiveMetaStore.java | 4 +- .../hive/metastore/TestHiveMetaStoreMethods.java | 1 - .../hive/metastore/TestHiveMetaStoreTimeout.java |16 +- .../metastore/TestMetaStoreConnectionUrlHook.java | 3 +- .../TestMetaStoreEventListenerWithOldConf.java | 4 +- .../hadoop/hive/metastore/TestObjectStore.java | 2 +- .../metastore/TestObjectStoreStatementVerify.java | 2 +- .../hadoop/hive/metastore/TestOldSchema.java | 2 +- .../hive/metastore/cache/TestCachedStore.java | 4 +- .../hive/metastore/cache/TestCatalogCaching.java | 4 +- 40 files changed, 10439 insertions(+), 10405 deletions(-) create mode 100644 standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
[hive] branch branch-2.3 updated: HIVE-24559: Fix some spelling issues (Ricky Ma reviewed by Vihang Karajgaonkar and Miklos Gergely)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-2.3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-2.3 by this push: new 9fbcfdd HIVE-24559: Fix some spelling issues (Ricky Ma reviewed by Vihang Karajgaonkar and Miklos Gergely) 9fbcfdd is described below commit 9fbcfddd976c11e4ccc54ed1860362935cb34fac Author: RickyMa AuthorDate: Tue Dec 29 02:49:09 2020 +0800 HIVE-24559: Fix some spelling issues (Ricky Ma reviewed by Vihang Karajgaonkar and Miklos Gergely) Closes #1805 --- .../src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java | 6 +++--- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java | 4 ++-- .../apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java | 2 +- ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidUtils.java | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java index a310c27..3023083 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java @@ -626,7 +626,7 @@ abstract class TxnHandler implements TxnStore, TxnStore.MutexAPI { * as a logical time counter. If S.commitTime < T.startTime, T and S do NOT overlap. * * Motivating example: - * Suppose we have multi-statment transactions T and S both of which are attempting x = x + 1 + * Suppose we have multi-statement transactions T and S both of which are attempting x = x + 1 * In order to prevent lost update problem, the the non-overlapping txns must lock in the snapshot * that they read appropriately. In particular, if txns do not overlap, then one follows the other * (assumig they write the same entity), and thus the 2nd must see changes of the 1st. 
We ensure @@ -855,7 +855,7 @@ abstract class TxnHandler implements TxnStore, TxnStore.MutexAPI { /** * As much as possible (i.e. in absence of retries) we want both operations to be done on the same * connection (but separate transactions). This avoid some flakiness in BONECP where if you - * perform an operation on 1 connection and immediately get another fron the pool, the 2nd one + * perform an operation on 1 connection and immediately get another from the pool, the 2nd one * doesn't see results of the first. * * Retry-by-caller note: If the call to lock is from a transaction, then in the worst case @@ -2430,7 +2430,7 @@ abstract class TxnHandler implements TxnStore, TxnStore.MutexAPI { * Lock acquisition is meant to be fair, so every lock can only block on some lock with smaller * hl_lock_ext_id by only checking earlier locks. * - * For any given SQL statment all locks required by it are grouped under single extLockId and are + * For any given SQL statement all locks required by it are grouped under single extLockId and are * granted all at once or all locks wait. * * This is expected to run at READ_COMMITTED. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java index 6a43385..4f7698d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java @@ -463,8 +463,8 @@ public enum ErrorMsg { MATERIALIZED_VIEW_DEF_EMPTY(10403, "Query for the materialized view rebuild could not be retrieved"), MERGE_PREDIACTE_REQUIRED(10404, "MERGE statement with both UPDATE and DELETE clauses " + "requires \"AND \" on the 1st WHEN MATCHED clause of <{0}>", true), - MERGE_TOO_MANY_DELETE(10405, "MERGE statment can have at most 1 WHEN MATCHED ... DELETE clause: <{0}>", true), - MERGE_TOO_MANY_UPDATE(10406, "MERGE statment can have at most 1 WHEN MATCHED ... 
UPDATE clause: <{0}>", true), + MERGE_TOO_MANY_DELETE(10405, "MERGE statement can have at most 1 WHEN MATCHED ... DELETE clause: <{0}>", true), + MERGE_TOO_MANY_UPDATE(10406, "MERGE statement can have at most 1 WHEN MATCHED ... UPDATE clause: <{0}>", true), INVALID_JOIN_CONDITION(10407, "Error parsing condition in outer join"), INVALID_TARGET_COLUMN_IN_SET_CLAUSE(10408, "Target column \"{0}\" of set clause is not found in table \"{1}\".", true), HIVE_GROUPING_FUNCTION_EXPR_NOT_IN_GROUPBY(10409, "Expression in GROUPING function not present in GROUP BY"), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java index 0541a40..0aaf529 100644 --- a/ql/src/java/org/
[hive] branch master updated (f292bda -> 5171037)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from f292bda HIVE-24562: Deflake TestHivePrivilegeObjectOwnerNameAndType (Vihang Karajgaonkar, reviewed by Kishen Das) add 5171037 HIVE-24561: Deflake TestCachedStoreUpdateUsingEvents test (Vihang Karajgaonkar reviewed by Kishen Das) No new revisions were added by this update. Summary of changes: .../metastore/cache/TestCachedStoreUpdateUsingEvents.java | 14 +- .../apache/hadoop/hive/metastore/cache/SharedCache.java| 3 +++ 2 files changed, 12 insertions(+), 5 deletions(-)
[hive] branch master updated (18d7caf -> f292bda)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 18d7caf HIVE-24556: Optimize DefaultGraphWalker for case with no grandchild (John Sherman, reviewed by Jesus Camacho Rodriguez) add f292bda HIVE-24562: Deflake TestHivePrivilegeObjectOwnerNameAndType (Vihang Karajgaonkar, reviewed by Kishen Das) No new revisions were added by this update. Summary of changes: .../authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java | 2 ++ 1 file changed, 2 insertions(+)
[hive] branch master updated (e4c6072 -> 9fb9747)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from e4c6072 HIVE-24477: Separate production and test code in TxnDbUtil (Peter Varga, reviewed by Miklos Gergely and Karen Coppage) add 9fb9747 HIVE-24513: Advance write ID during drop constraint (Kishen Das via Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../drop/AlterTableDropConstraintAnalyzer.java | 7 +++ .../drop/AlterTableDropConstraintDesc.java | 24 +- .../org/apache/hadoop/hive/ql/TestTxnCommands.java | 23 ++--- .../clientnegative/drop_invalid_constraint2.q.out | 4 +--- .../clientpositive/llap/default_constraint.q.out | 6 +++--- 5 files changed, 54 insertions(+), 10 deletions(-)
[hive] branch master updated (87c45b6 -> 8f56425)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 87c45b6 HIVE-17709: remove sun.misc.Cleaner references (#1739) (Laszlo Bodor, contributed by David Mollitor, reviewed by David Mollitor, Prasanth Jayachandran) add 8f56425 HIVE-24276: HiveServer2 loggerconf jsp Cross-Site Scripting (XSS) Vulnerability (Rajkumar Singh, reviewed by Chao Sun, Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: service/src/resources/hive-webapps/static/js/logconf.js | 15 ++- 1 file changed, 14 insertions(+), 1 deletion(-)
[hive] branch master updated (2caad3d -> c70765c)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 2caad3d HIVE-24488: Make tests using docker more comfortable (#1766) (Zoltan Haindrich reviewed by Krisztian Kasa) add c70765c HIVE-24482: Set advanced write Id during add constraint DDL tasks (Kishen Das, reviewed by Miklos Gergely and Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: .../add/AlterTableAddConstraintAnalyzer.java | 12 ++-- .../org/apache/hadoop/hive/ql/TestTxnCommands.java | 35 ++ .../alter_table_constraint_invalid_fk_tbl1.q.out | 4 +-- .../alter_table_constraint_invalid_pk_tbl.q.out| 4 +-- .../clientpositive/llap/default_constraint.q.out | 6 ++-- 5 files changed, 49 insertions(+), 12 deletions(-)
[hive] branch master updated (6816a7e -> 3f9cf38)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 6816a7e Hive-24287: Using SHA-512 for Cookie signature (#1589) (Sai Hemanth Gantasala, reviewed by Yongzhi Chen) add 3f9cf38 HIVE-23962 Make bin/hive pick user defined jdbc url (Naveen Gangam and Xiaomeng Zhang, reviewed by Vihang Karajgaonkar) No new revisions were added by this update. Summary of changes: bin/hive | 16 1 file changed, 12 insertions(+), 4 deletions(-)
[hive] branch master updated: HIVE-24120 Plugin for external DatabaseProduct in standalone HMS (#1470)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 5af8a61 HIVE-24120 Plugin for external DatabaseProduct in standalone HMS (#1470) 5af8a61 is described below commit 5af8a612f844482a02fd8e59aec25c854ba8a175 Author: gatorblue <70545723+gatorb...@users.noreply.github.com> AuthorDate: Mon Oct 19 16:04:07 2020 -0400 HIVE-24120 Plugin for external DatabaseProduct in standalone HMS (#1470) --- .gitignore | 2 + .../hcatalog/listener/DbNotificationListener.java | 11 +- .../hadoop/hive/metastore/conf/MetastoreConf.java | 11 + .../hadoop/hive/metastore/DatabaseProduct.java | 644 - .../hadoop/hive/metastore/MetaStoreDirectSql.java | 40 +- .../apache/hadoop/hive/metastore/ObjectStore.java | 29 +- .../datasource/DbCPDataSourceProvider.java | 25 +- .../datasource/HikariCPDataSourceProvider.java | 25 +- .../hadoop/hive/metastore/tools/SQLGenerator.java | 144 + .../hadoop/hive/metastore/txn/TxnDbUtil.java | 154 ++--- .../hadoop/hive/metastore/txn/TxnHandler.java | 102 +--- .../hadoop/hive/metastore/DummyCustomRDBMS.java| 122 .../hadoop/hive/metastore/txn/TestTxnUtils.java| 30 +- 13 files changed, 883 insertions(+), 456 deletions(-) diff --git a/.gitignore b/.gitignore index 9dc3dc4..83859c9 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ build-eclipse .settings .factorypath *.launch +*.metadata *~ metastore_db common/src/gen @@ -39,3 +40,4 @@ launch.json settings.json kafka-handler/src/test/gen **/.vscode/ +/.recommenders/ diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java index 6041af7..d7757e6 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java +++ 
b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java @@ -35,7 +35,6 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; -import org.apache.hadoop.hive.metastore.DatabaseProduct; import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; import org.apache.hadoop.hive.metastore.MetaStoreEventListenerConstants; import org.apache.hadoop.hive.metastore.RawStore; @@ -132,7 +131,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME; -import static org.apache.hadoop.hive.metastore.DatabaseProduct.MYSQL; /** * An implementation of {@link org.apache.hadoop.hive.metastore.MetaStoreEventListener} that @@ -959,8 +957,9 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener try { stmt = dbConn.createStatement(); - if (sqlGenerator.getDbProduct() == MYSQL) { -stmt.execute("SET @@session.sql_mode=ANSI_QUOTES"); + String st = sqlGenerator.getDbProduct().getPrepareTxnStmt(); + if (st != null) { +stmt.execute(st); } String s = sqlGenerator.addForUpdateClause("select \"WNL_FILES\", \"WNL_ID\" from" + @@ -1054,14 +1053,14 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener stmt = dbConn.createStatement(); event.setMessageFormat(msgEncoder.getMessageFormat()); - if (sqlGenerator.getDbProduct() == MYSQL) { + if (sqlGenerator.getDbProduct().isMYSQL()) { stmt.execute("SET @@session.sql_mode=ANSI_QUOTES"); } // Derby doesn't allow FOR UPDATE to lock the row being selected (See https://db.apache // .org/derby/docs/10.1/ref/rrefsqlj31783.html) . So lock the whole table. Since there's // only one row in the table, this shouldn't cause any performance degradation. 
- if (sqlGenerator.getDbProduct() == DatabaseProduct.DERBY) { + if (sqlGenerator.getDbProduct().isDERBY()) { String lockingQuery = "lock table \"NOTIFICATION_SEQUENCE\" in exclusive mode"; LOG.info("Going to execute query <" + lockingQuery + ">"); stmt.executeUpdate(lockingQuery); diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index c179ace..cdbe919 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastor
[hive] branch master updated: HIVE-24026: HMS/Ranger Spark view authorization plan (Sai Hemanth Gantasala reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 7c19fc8 HIVE-24026: HMS/Ranger Spark view authorization plan (Sai Hemanth Gantasala reviewed by Vihang Karajgaonkar) 7c19fc8 is described below commit 7c19fc8fb12a1ff4afe9fe7cdadc0988fc0bc1f7 Author: saihemanth-cloudera <68923650+saihemanth-cloud...@users.noreply.github.com> AuthorDate: Wed Sep 2 14:05:01 2020 -0700 HIVE-24026: HMS/Ranger Spark view authorization plan (Sai Hemanth Gantasala reviewed by Vihang Karajgaonkar) Spark client can create/alter/drop a view (#1391) --- .../authorization/command/CommandAuthorizerV2.java | 51 +- .../plugin/metastore/HiveMetaStoreAuthorizer.java | 13 -- .../metastore/TestHiveMetaStoreAuthorizer.java | 35 +-- 3 files changed, 91 insertions(+), 8 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java index e9a278d..08fcdc5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java @@ -24,6 +24,10 @@ import java.util.Map; import java.util.Set; import java.util.Map.Entry; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.FunctionUtils; @@ -104,8 +108,25 @@ final class CommandAuthorizerV2 { continue; } if (privObject instanceof ReadEntity && !((ReadEntity)privObject).isDirect()) { -// This ReadEntity 
represents one of the underlying tables/views of a view, so skip it. -continue; +// This ReadEntity represents one of the underlying tables/views of a view, skip it if +// it's not inside a deferred authorized view. +ReadEntity reTable = (ReadEntity)privObject; +Boolean isDeferred = false; +if( reTable.getParents() != null && reTable.getParents().size() > 0){ + for( ReadEntity re: reTable.getParents()){ +if (re.getTyp() == Type.TABLE && re.getTable() != null ) { + Table t = re.getTable(); + if(!isDeferredAuthView(t)){ +continue; + }else{ +isDeferred = true; + } +} + } +} +if(!isDeferred){ + continue; +} } if (privObject instanceof WriteEntity && ((WriteEntity)privObject).isTempURI()) { // do not authorize temporary uris @@ -121,6 +142,32 @@ final class CommandAuthorizerV2 { return hivePrivobjs; } + /** + * A deferred authorization view is view created by non-super user like spark-user. This view contains a parameter "Authorized" + * set to false, so ranger will not authorize it during view creation. When a select statement is issued, then the ranger authorizes + * the under lying tables. 
+ * @param Table t + * @return boolean value + */ + private static boolean isDeferredAuthView(Table t){ +String tableType = t.getTTable().getTableType(); +String authorizedKeyword = "Authorized"; +boolean isView = false; +if (TableType.MATERIALIZED_VIEW.name().equals(tableType) || TableType.VIRTUAL_VIEW.name().equals(tableType)) { + isView = true; +} +if(isView){ + Map params = t.getParameters(); + if (params != null && params.containsKey(authorizedKeyword)) { +String authorizedValue = params.get(authorizedKeyword); +if ("false".equalsIgnoreCase(authorizedValue)) { + return true; +} + } +} +return false; + } + private static void addHivePrivObject(Entity privObject, Map> tableName2Cols, List hivePrivObjs) { HivePrivilegeObjectType privObjType = AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java index fb9d6dd..23db0da 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java
[hive] 02/02: HIVE-23821: Send tableId in request for all the new HMS get_partition APIs (Kishen Das reviewed by Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git commit d87c95ecf64856860e1a37dbdb019d26c019d7bb Author: kishendas AuthorDate: Fri Aug 7 14:00:55 2020 -0700 HIVE-23821: Send tableId in request for all the new HMS get_partition APIs (Kishen Das reviewed by Vihang Karajgaonkar) This is same commit as 0a7791443dbb562e6c8a0493319fc52af793fe6b except that the commit message is updated. --- .../add/AlterTableAddPartitionOperation.java | 2 +- .../org/apache/hadoop/hive/ql/metadata/Hive.java | 45 ++- .../hadoop/hive/ql/lockmgr/TestTxnManager.java | 71 +++ ...TestHiveMetaStoreClientApiArgumentsChecker.java | 141 + .../hive/metastore/TestHiveMetaStoreClient.java| 119 + 5 files changed, 372 insertions(+), 6 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java index ddc47a4..e5dfcb6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java @@ -175,7 +175,7 @@ public class AlterTableAddPartitionOperation extends DDLOperation params) throws HiveException { try { - return getMSC().getPartition(dbName, tableName, params); + GetPartitionRequest req = new GetPartitionRequest(); + req.setDbName(dbName); + req.setTblName(tableName); + req.setPartVals(params); + if (AcidUtils.isTransactionalTable(t)) { +ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tableName); +req.setValidWriteIdList(validWriteIdList != null ? 
validWriteIdList.toString() : null); +req.setId(t.getTTable().getId()); + } + GetPartitionResponse res = getMSC().getPartitionRequest(req); + return res.getPartition(); } catch (Exception e) { LOG.error(StringUtils.stringifyException(e)); throw new HiveException(e); @@ -3623,6 +3642,7 @@ private void constructOneLBLocationMap(FileStatus fSta, if (AcidUtils.isTransactionalTable(t)) { ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tblName); req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null); +req.setId(t.getTTable().getId()); } GetPartitionNamesPsResponse res = getMSC().listPartitionNamesRequest(req); names = res.getNames(); @@ -3661,6 +3681,7 @@ private void constructOneLBLocationMap(FileStatus fSta, if (AcidUtils.isTransactionalTable(tbl)) { ValidWriteIdList validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName()); req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null); +req.setId(tbl.getTTable().getId()); } names = getMSC().listPartitionNames(req); @@ -3697,6 +3718,7 @@ private void constructOneLBLocationMap(FileStatus fSta, if (AcidUtils.isTransactionalTable(tbl)) { ValidWriteIdList validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName()); req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null); +req.setId(tbl.getTTable().getId()); } GetPartitionsPsWithAuthResponse res = getMSC().listPartitionsWithAuthInfoRequest(req); tParts = res.getPartitions(); @@ -4006,13 +4028,16 @@ private void constructOneLBLocationMap(FileStatus fSta, List msParts = new ArrayList<>(); ValidWriteIdList validWriteIdList = null; + + PartitionsByExprRequest req = buildPartitionByExprRequest(tbl, exprBytes, defaultPartitionName, conf, + null); + if (AcidUtils.isTransactionalTable(tbl)) { validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName()); +req.setValidWriteIdList(validWriteIdList != null ? 
validWriteIdList.toString() : null); +req.setId(tbl.getTTable().getId()); } - PartitionsByExprRequest req = buildPartitionByExprRequest(tbl, exprBytes, defaultPartitionName, conf, - validWriteIdList != null ? validWriteIdList.toString() : null); - boolean hasUnknownParts = getMSC().listPartitionsSpecByExpr(req, msParts); partitions.addAll(convertFromPartSpec(msParts.iterator(), tbl)); @@ -5260,6 +5285,16 @@ private void constructOneLBLocationMap(FileStatus fSta, } /** + * Sets the metastore client for the current thread + * @throws MetaException + */ + @VisibleForTesting + public synchronized void setMSC(IMetaStoreClient client) + throws MetaException { +metaStoreClient =
[hive] branch master updated (0a77914 -> d87c95e)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 0a77914 HIVE-23821: Send tableId in request for all the new HMS get_partition APIs new 82486d7 Revert "HIVE-23821: Send tableId in request for all the new HMS get_partition APIs" new d87c95e HIVE-23821: Send tableId in request for all the new HMS get_partition APIs (Kishen Das reviewed by Vihang Karajgaonkar) The 2 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes:
[hive] 01/02: Revert "HIVE-23821: Send tableId in request for all the new HMS get_partition APIs"
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git commit 82486d7043fd2fbbae9c2610233d69ec3d2c9046 Author: Vihang Karajgaonkar AuthorDate: Fri Aug 7 14:11:14 2020 -0700 Revert "HIVE-23821: Send tableId in request for all the new HMS get_partition APIs" This reverts commit 0a7791443dbb562e6c8a0493319fc52af793fe6b. --- .../add/AlterTableAddPartitionOperation.java | 2 +- .../org/apache/hadoop/hive/ql/metadata/Hive.java | 45 +-- .../hadoop/hive/ql/lockmgr/TestTxnManager.java | 71 --- ...TestHiveMetaStoreClientApiArgumentsChecker.java | 141 - .../hive/metastore/TestHiveMetaStoreClient.java| 119 - 5 files changed, 6 insertions(+), 372 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java index e5dfcb6..ddc47a4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java @@ -175,7 +175,7 @@ public class AlterTableAddPartitionOperation extends DDLOperation params) throws HiveException { try { - GetPartitionRequest req = new GetPartitionRequest(); - req.setDbName(dbName); - req.setTblName(tableName); - req.setPartVals(params); - if (AcidUtils.isTransactionalTable(t)) { -ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tableName); -req.setValidWriteIdList(validWriteIdList != null ? 
validWriteIdList.toString() : null); -req.setId(t.getTTable().getId()); - } - GetPartitionResponse res = getMSC().getPartitionRequest(req); - return res.getPartition(); + return getMSC().getPartition(dbName, tableName, params); } catch (Exception e) { LOG.error(StringUtils.stringifyException(e)); throw new HiveException(e); @@ -3642,7 +3623,6 @@ private void constructOneLBLocationMap(FileStatus fSta, if (AcidUtils.isTransactionalTable(t)) { ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tblName); req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null); -req.setId(t.getTTable().getId()); } GetPartitionNamesPsResponse res = getMSC().listPartitionNamesRequest(req); names = res.getNames(); @@ -3681,7 +3661,6 @@ private void constructOneLBLocationMap(FileStatus fSta, if (AcidUtils.isTransactionalTable(tbl)) { ValidWriteIdList validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName()); req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null); -req.setId(tbl.getTTable().getId()); } names = getMSC().listPartitionNames(req); @@ -3718,7 +3697,6 @@ private void constructOneLBLocationMap(FileStatus fSta, if (AcidUtils.isTransactionalTable(tbl)) { ValidWriteIdList validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName()); req.setValidWriteIdList(validWriteIdList != null ? 
validWriteIdList.toString() : null); -req.setId(tbl.getTTable().getId()); } GetPartitionsPsWithAuthResponse res = getMSC().listPartitionsWithAuthInfoRequest(req); tParts = res.getPartitions(); @@ -4028,16 +4006,13 @@ private void constructOneLBLocationMap(FileStatus fSta, List msParts = new ArrayList<>(); ValidWriteIdList validWriteIdList = null; - - PartitionsByExprRequest req = buildPartitionByExprRequest(tbl, exprBytes, defaultPartitionName, conf, - null); - if (AcidUtils.isTransactionalTable(tbl)) { validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName()); -req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null); -req.setId(tbl.getTTable().getId()); } + PartitionsByExprRequest req = buildPartitionByExprRequest(tbl, exprBytes, defaultPartitionName, conf, + validWriteIdList != null ? validWriteIdList.toString() : null); + boolean hasUnknownParts = getMSC().listPartitionsSpecByExpr(req, msParts); partitions.addAll(convertFromPartSpec(msParts.iterator(), tbl)); @@ -5285,16 +5260,6 @@ private void constructOneLBLocationMap(FileStatus fSta, } /** - * Sets the metastore client for the current thread - * @throws MetaException - */ - @VisibleForTesting - public synchronized void setMSC(IMetaStoreClient client) - throws MetaException { -metaStoreClient = client; - } - - /** * @return the metastore client for the current thre
[hive] branch master updated: HIVE-23821: Send tableId in request for all the new HMS get_partition APIs
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 0a77914 HIVE-23821: Send tableId in request for all the new HMS get_partition APIs 0a77914 is described below commit 0a7791443dbb562e6c8a0493319fc52af793fe6b Author: kishendas AuthorDate: Fri Aug 7 14:00:55 2020 -0700 HIVE-23821: Send tableId in request for all the new HMS get_partition APIs --- .../add/AlterTableAddPartitionOperation.java | 2 +- .../org/apache/hadoop/hive/ql/metadata/Hive.java | 45 ++- .../hadoop/hive/ql/lockmgr/TestTxnManager.java | 71 +++ ...TestHiveMetaStoreClientApiArgumentsChecker.java | 141 + .../hive/metastore/TestHiveMetaStoreClient.java| 119 + 5 files changed, 372 insertions(+), 6 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java index ddc47a4..e5dfcb6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionOperation.java @@ -175,7 +175,7 @@ public class AlterTableAddPartitionOperation extends DDLOperation params) throws HiveException { try { - return getMSC().getPartition(dbName, tableName, params); + GetPartitionRequest req = new GetPartitionRequest(); + req.setDbName(dbName); + req.setTblName(tableName); + req.setPartVals(params); + if (AcidUtils.isTransactionalTable(t)) { +ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tableName); +req.setValidWriteIdList(validWriteIdList != null ? 
validWriteIdList.toString() : null); +req.setId(t.getTTable().getId()); + } + GetPartitionResponse res = getMSC().getPartitionRequest(req); + return res.getPartition(); } catch (Exception e) { LOG.error(StringUtils.stringifyException(e)); throw new HiveException(e); @@ -3623,6 +3642,7 @@ private void constructOneLBLocationMap(FileStatus fSta, if (AcidUtils.isTransactionalTable(t)) { ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tblName); req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null); +req.setId(t.getTTable().getId()); } GetPartitionNamesPsResponse res = getMSC().listPartitionNamesRequest(req); names = res.getNames(); @@ -3661,6 +3681,7 @@ private void constructOneLBLocationMap(FileStatus fSta, if (AcidUtils.isTransactionalTable(tbl)) { ValidWriteIdList validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName()); req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null); +req.setId(tbl.getTTable().getId()); } names = getMSC().listPartitionNames(req); @@ -3697,6 +3718,7 @@ private void constructOneLBLocationMap(FileStatus fSta, if (AcidUtils.isTransactionalTable(tbl)) { ValidWriteIdList validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName()); req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null); +req.setId(tbl.getTTable().getId()); } GetPartitionsPsWithAuthResponse res = getMSC().listPartitionsWithAuthInfoRequest(req); tParts = res.getPartitions(); @@ -4006,13 +4028,16 @@ private void constructOneLBLocationMap(FileStatus fSta, List msParts = new ArrayList<>(); ValidWriteIdList validWriteIdList = null; + + PartitionsByExprRequest req = buildPartitionByExprRequest(tbl, exprBytes, defaultPartitionName, conf, + null); + if (AcidUtils.isTransactionalTable(tbl)) { validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName()); +req.setValidWriteIdList(validWriteIdList != null ? 
validWriteIdList.toString() : null); +req.setId(tbl.getTTable().getId()); } - PartitionsByExprRequest req = buildPartitionByExprRequest(tbl, exprBytes, defaultPartitionName, conf, - validWriteIdList != null ? validWriteIdList.toString() : null); - boolean hasUnknownParts = getMSC().listPartitionsSpecByExpr(req, msParts); partitions.addAll(convertFromPartSpec(msParts.iterator(), tbl)); @@ -5260,6 +5285,16 @@ private void constructOneLBLocationMap(FileStatus fSta, } /** + * Sets the metastore client for the current thread + * @throws MetaException + */ + @VisibleForTesting + public synchronized void setMSC(IMetaStoreClient client) + throws MetaExc
[hive] branch master updated: HIVE-23767: Pass ValidWriteIdList in get_partition* API requests (Kishen Das, reviewed by Vihang Karajgaonkar and Peter Vary)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 6b3b000 HIVE-23767: Pass ValidWriteIdList in get_partition* API requests (Kishen Das, reviewed by Vihang Karajgaonkar and Peter Vary) 6b3b000 is described below commit 6b3b000c5741fc852a76c87625578c37339bf874 Author: Kishen Das AuthorDate: Tue Jul 14 12:06:41 2020 -0700 HIVE-23767: Pass ValidWriteIdList in get_partition* API requests (Kishen Das, reviewed by Vihang Karajgaonkar and Peter Vary) --- .../org/apache/hadoop/hive/ql/metadata/Hive.java | 91 ++ .../ql/metadata/SessionHiveMetaStoreClient.java| 48 +++- .../hadoop/hive/metastore/TestMetastoreExpr.java | 3 +- ...HiveMetastoreClientListPartitionsTempTable.java | 27 --- .../ql/parse/TestUpdateDeleteSemanticAnalyzer.java | 5 ++ .../hadoop/hive/metastore/HiveMetaStoreClient.java | 45 ++- .../hadoop/hive/metastore/IMetaStoreClient.java| 16 +++- .../hadoop/hive/metastore/HiveMetaStore.java | 11 ++- .../metastore/HiveMetaStoreClientPreCatalog.java | 13 +++- 9 files changed, 219 insertions(+), 40 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 14eec31..e17086f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.metadata; import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; import com.google.common.base.Splitter; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; @@ -134,6 +135,10 @@ import org.apache.hadoop.hive.metastore.api.FireEventRequestData; import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest; import org.apache.hadoop.hive.metastore.api.Function; 
import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse; +import org.apache.hadoop.hive.metastore.api.GetPartitionNamesPsRequest; +import org.apache.hadoop.hive.metastore.api.GetPartitionNamesPsResponse; +import org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthRequest; +import org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse; import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest; import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; @@ -148,6 +153,7 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest; import org.apache.hadoop.hive.metastore.api.PartitionSpec; import org.apache.hadoop.hive.metastore.api.PartitionWithoutSD; +import org.apache.hadoop.hive.metastore.api.PartitionsByExprRequest; import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest; import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; import org.apache.hadoop.hive.metastore.api.PrincipalType; @@ -1460,6 +1466,24 @@ public class Hive { } /** + * Get ValidWriteIdList for the current transaction. + * This fetches the ValidWriteIdList from the metastore for a given table if txnManager has an open transaction. + * + * @param dbName + * @param tableName + * @return + * @throws LockException + */ + private ValidWriteIdList getValidWriteIdList(String dbName, String tableName) throws LockException { +ValidWriteIdList validWriteIdList = null; +long txnId = SessionState.get().getTxnMgr() != null ? SessionState.get().getTxnMgr().getCurrentTxnId() : 0; +if (txnId > 0) { + validWriteIdList = AcidUtils.getTableValidWriteIdListWithTxnList(conf, dbName, tableName); +} +return validWriteIdList; + } + + /** * Get all table names for the current database. 
* @return List of table names * @throws HiveException @@ -3550,11 +3574,6 @@ private void constructOneLBLocationMap(FileStatus fSta, } } - public List getPartitionNames(String tblName, short max) throws HiveException { -String[] names = Utilities.getDbTableName(tblName); -return getPartitionNames(names[0], names[1], max); - } - public List getPartitionNames(String dbName, String tblName, short max) throws HiveException { List names = null; @@ -3580,7 +3599,17 @@ private void constructOneLBLocationMap(FileStatus fSta, List pvals = MetaStoreUtils.getPvals(t.getPartCols(), partSpec); try { - names = getMSC().listPartitionNames(dbName, tblName, pvals, max); + GetPartitionNamesPsRequest req = new GetPartitionNamesPsRequest(); + req.setTblName(tblName); + req.setDbName(dbName); + req.setPartValues(pvals); + req.setMaxParts(
[hive] branch master updated: HIVE-23573: Advance the write id for the table for DDL (Kishen Das reviewed by Peter Vary, Vihang Karajgaonkar)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 6440d93 HIVE-23573: Advance the write id for the table for DDL (Kishen Das reviewed by Peter Vary, Vihang Karajgaonkar) 6440d93 is described below commit 6440d93981e6d6aab59ecf2e77ffa45cd84d47de Author: Kishen Das AuthorDate: Mon Jun 29 09:46:12 2020 -0700 HIVE-23573: Advance the write id for the table for DDL (Kishen Das reviewed by Peter Vary, Vihang Karajgaonkar) --- ...estReplicationScenariosAcidTablesBootstrap.java | 4 ++-- .../add/AlterTableAddConstraintDesc.java | 2 +- .../table/misc/owner/AlterTableSetOwnerDesc.java | 2 +- .../add/AbstractAddPartitionAnalyzer.java | 17 + .../add/AlterTableAddPartitionAnalyzer.java| 2 ++ .../partition/add/AlterTableAddPartitionDesc.java | 22 -- .../drop/AlterTableDropPartitionDesc.java | 21 +++-- .../storage/cluster/AlterTableClusteredByDesc.java | 2 +- .../storage/cluster/AlterTableIntoBucketsDesc.java | 2 +- .../cluster/AlterTableNotClusteredDesc.java| 2 +- .../storage/cluster/AlterTableNotSortedDesc.java | 2 +- .../storage/compact/AlterTableCompactDesc.java | 20 ++-- .../storage/serde/AlterTableSetSerdeDesc.java | 2 +- .../storage/serde/AlterTableSetSerdePropsDesc.java | 2 +- .../fileformat/AlterTableSetFileFormatDesc.java| 2 +- .../org/apache/hadoop/hive/ql/metadata/Table.java | 17 + .../hive/ql/parse/RewriteSemanticAnalyzer.java | 8 +++- .../hadoop/hive/ql/stats/ColStatsProcessor.java| 8 ++-- .../apache/hadoop/hive/ql/TestTxnCommands2.java| 2 +- .../udf/generic/TestGenericUDTFGetSQLSchema.java | 1 + .../org/apache/hive/streaming/TestStreaming.java | 10 ++ 21 files changed, 121 insertions(+), 29 deletions(-) diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcidTablesBootstrap.java 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcidTablesBootstrap.java index b0c3a9e..5d94db7 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcidTablesBootstrap.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcidTablesBootstrap.java @@ -190,10 +190,10 @@ public class TestReplicationScenariosAcidTablesBootstrap prepareIncNonAcidData(primaryDbName); prepareIncAcidData(primaryDbName); // Allocate write ids for tables t1 and t2 for all txns -// t1=5+2(insert) and t2=5+5(insert, alter add column) +// t1=5+2(insert) and t2=5+6(insert, alter add column), now alter also creates a transaction Map tables = new HashMap<>(); tables.put("t1", numTxns+2L); -tables.put("t2", numTxns+5L); +tables.put("t2", numTxns+6L); allocateWriteIdsForTables(primaryDbName, tables, txnHandler, txns, primaryConf); // Bootstrap dump with open txn timeout as 1s. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintDesc.java index c05abfa..4bcbf4a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintDesc.java @@ -41,6 +41,6 @@ public class AlterTableAddConstraintDesc extends AbstractAlterTableWithConstrain @Override public boolean mayNeedWriteId() { -return false; +return true; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/owner/AlterTableSetOwnerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/owner/AlterTableSetOwnerDesc.java index 2eb8f99..c885e19 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/owner/AlterTableSetOwnerDesc.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/owner/AlterTableSetOwnerDesc.java @@ -47,6 +47,6 @@ public class AlterTableSetOwnerDesc extends AbstractAlterTableDesc { @Override public boolean mayNeedWriteId() { -return false; +return true; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AbstractAddPartitionAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AbstractAddPartitionAnalyzer.java index e1c8718..0736f16 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AbstractAddPartitionAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AbstractAddPartitionAnalyzer.java @@ -29,6 +29,7 @@ import org.apach
[hive] 01/02: HIVE-21851: FireEventResponse should include event id when available (Vihang Karajgaonkar, reviewed by Naveen Gangam)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git commit 7426703e60112937ed3749526fcfbfeda2bf2ace Author: Vihang Karajgaonkar AuthorDate: Mon Mar 30 16:48:08 2020 -0700 HIVE-21851: FireEventResponse should include event id when available (Vihang Karajgaonkar, reviewed by Naveen Gangam) --- .../listener/TestDbNotificationListener.java | 12 ++- .../gen/thrift/gen-cpp/hive_metastore_types.cpp| 34 ++- .../src/gen/thrift/gen-cpp/hive_metastore_types.h | 15 ++- .../hive/metastore/api/FireEventResponse.java | 112 - .../src/gen/thrift/gen-php/metastore/Types.php | 27 - .../src/gen/thrift/gen-py/hive_metastore/ttypes.py | 19 .../src/gen/thrift/gen-rb/hive_metastore_types.rb | 3 +- .../hadoop/hive/metastore/HiveMetaStore.java | 9 +- .../src/main/thrift/hive_metastore.thrift | 2 +- 9 files changed, 219 insertions(+), 14 deletions(-) diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 37b2bd8..bfb1982 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -47,6 +47,7 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.FireEventRequest; import org.apache.hadoop.hive.metastore.api.FireEventRequestData; +import org.apache.hadoop.hive.metastore.api.FireEventResponse; import org.apache.hadoop.hive.metastore.api.Function; import org.apache.hadoop.hive.metastore.api.FunctionType; import org.apache.hadoop.hive.metastore.api.InsertEventRequestData; @@ -98,6 +99,7 @@ import org.apache.hadoop.hive.ql.session.SessionState; import 
org.apache.hive.hcatalog.api.repl.ReplicationV1CompatRule; import org.apache.hive.hcatalog.data.Pair; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; @@ -1047,7 +1049,10 @@ public class TestDbNotificationListener { rqst.setDbName(defaultDbName); rqst.setTableName(tblName); // Event 2 -msClient.fireListenerEvent(rqst); +FireEventResponse response = msClient.fireListenerEvent(rqst); +assertTrue("Event id must be set in the fireEvent response", response.isSetEventId()); +Assert.assertNotNull(response.getEventId()); +Assert.assertTrue(response.getEventId() != -1); // Get notifications from metastore NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null); @@ -1116,7 +1121,10 @@ public class TestDbNotificationListener { rqst.setTableName(tblName); rqst.setPartitionVals(partCol1Vals); // Event 3 -msClient.fireListenerEvent(rqst); +FireEventResponse response = msClient.fireListenerEvent(rqst); +assertTrue("Event id must be set in the fireEvent response", response.isSetEventId()); +Assert.assertNotNull(response.getEventId()); +Assert.assertTrue(response.getEventId() != -1); // Get notifications from metastore NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null); diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp index 5f2948e..f3e5d3a 100644 --- a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp +++ b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp @@ -22756,6 +22756,10 @@ FireEventResponse::~FireEventResponse() throw() { } +void FireEventResponse::__set_eventId(const int64_t val) { + this->eventId = val; +} + uint32_t FireEventResponse::read(::apache::thrift::protocol::TProtocol* iprot) { apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); @@ -22775,7 +22779,20 @@ uint32_t 
FireEventResponse::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_STOP) { break; } -xfer += iprot->skip(ftype); +switch (fid) +{ + case 1: +if (ftype == ::apache::thrift::protocol::T_I64) { + xfer += iprot->readI64(this->eventId); + this->__isset.eventId = true; +} else { + xfer += iprot->skip(ftype); +} +break; + default: +xfer += iprot->skip(ftype); +break; +} xfer += iprot->readFieldEnd(); } @@ -22789,6 +22806,10 @@ uint32_t FireEventResponse::write(::apache::thrift::protocol::TProtocol* oprot) apache::thrift::protocol::TOutputRecurs
[hive] branch branch-3 updated (5ed2ec0 -> 64ecee6)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a change to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git. from 5ed2ec0 HIVE-23033:MSSQL metastore schema init script doesn't initialize NOTIFICATION_SEQUENCE (David Lavati via Naveen Gangam) new 7426703 HIVE-21851: FireEventResponse should include event id when available (Vihang Karajgaonkar, reviewed by Naveen Gangam) new 64ecee6 HIVE-23018: Provide a bulk API to fire multiple insert events (Vihang Karajgaonkar, reviewed by Yonghzi Chen) The 2 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../listener/TestDbNotificationListener.java | 119 +- .../src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp | 2218 +- .../gen/thrift/gen-cpp/hive_metastore_types.cpp| 2164 + .../src/gen/thrift/gen-cpp/hive_metastore_types.h | 31 +- .../metastore/api/ClearFileMetadataRequest.java| 32 +- .../hive/metastore/api/ClientCapabilities.java | 32 +- .../hive/metastore/api/FindSchemasByColsResp.java | 36 +- .../hive/metastore/api/FireEventRequest.java | 32 +- .../hive/metastore/api/FireEventRequestData.java | 98 +- .../hive/metastore/api/FireEventResponse.java | 163 +- .../metastore/api/GetAllFunctionsResponse.java | 36 +- .../api/GetFileMetadataByExprRequest.java | 32 +- .../metastore/api/GetFileMetadataByExprResult.java | 48 +- .../hive/metastore/api/GetFileMetadataRequest.java | 32 +- .../hive/metastore/api/GetFileMetadataResult.java | 44 +- .../hive/metastore/api/GetTablesRequest.java | 32 +- .../hadoop/hive/metastore/api/GetTablesResult.java | 36 +- .../hive/metastore/api/InsertEventRequestData.java | 223 +- .../hive/metastore/api/PutFileMetadataRequest.java | 64 +- .../hadoop/hive/metastore/api/SchemaVersion.java | 36 +- .../hive/metastore/api/ThriftHiveMetastore.java| 2432 ++-- 
.../hive/metastore/api/WMFullResourcePlan.java | 144 +- .../api/WMGetAllResourcePlanResponse.java | 36 +- .../api/WMGetTriggersForResourePlanResponse.java | 36 +- .../api/WMValidateResourcePlanResponse.java| 64 +- .../metastore/api/WriteNotificationLogRequest.java | 32 +- .../gen-php/metastore/ThriftHiveMetastore.php | 1368 +-- .../src/gen/thrift/gen-php/metastore/Types.php | 617 +++-- .../gen-py/hive_metastore/ThriftHiveMetastore.py | 924 .../src/gen/thrift/gen-py/hive_metastore/ttypes.py | 392 ++-- .../src/gen/thrift/gen-rb/hive_metastore_types.rb | 15 +- .../hadoop/hive/metastore/HiveMetaStore.java | 72 +- .../src/main/thrift/hive_metastore.thrift |7 +- 33 files changed, 6283 insertions(+), 5364 deletions(-)
[hive] branch master updated: HIVE-21851 : FireEventResponse should include event id when available (Vihang Karajgaonkar, reviewed by Naveen Gangam)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new c77ef8e HIVE-21851 : FireEventResponse should include event id when available (Vihang Karajgaonkar, reviewed by Naveen Gangam) c77ef8e is described below commit c77ef8e835ebd5208a082078e491c976b6ea295b Author: Vihang Karajgaonkar AuthorDate: Thu Mar 12 14:34:15 2020 -0700 HIVE-21851 : FireEventResponse should include event id when available (Vihang Karajgaonkar, reviewed by Naveen Gangam) --- .../listener/TestDbNotificationListener.java | 12 ++- .../hive/metastore/api/FireEventResponse.java | 112 - .../src/gen/thrift/gen-php/metastore/Types.php | 27 - .../src/gen/thrift/gen-py/hive_metastore/ttypes.py | 19 .../src/gen/thrift/gen-rb/hive_metastore_types.rb | 3 +- .../src/main/thrift/hive_metastore.thrift | 2 +- .../hadoop/hive/metastore/HiveMetaStore.java | 9 +- 7 files changed, 177 insertions(+), 7 deletions(-) diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 66bdee1..acf9b2c 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -49,6 +49,7 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.FireEventRequest; import org.apache.hadoop.hive.metastore.api.FireEventRequestData; +import org.apache.hadoop.hive.metastore.api.FireEventResponse; import org.apache.hadoop.hive.metastore.api.Function; import org.apache.hadoop.hive.metastore.api.FunctionType; import 
org.apache.hadoop.hive.metastore.api.InsertEventRequestData; @@ -104,6 +105,7 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.hcatalog.api.repl.ReplicationV1CompatRule; import org.apache.hive.hcatalog.data.Pair; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; @@ -1171,7 +1173,10 @@ public class TestDbNotificationListener { rqst.setDbName(defaultDbName); rqst.setTableName(tblName); // Event 2 -msClient.fireListenerEvent(rqst); +FireEventResponse response = msClient.fireListenerEvent(rqst); +assertTrue("Event id must be set in the fireEvent response", response.isSetEventId()); +Assert.assertNotNull(response.getEventId()); +Assert.assertTrue(response.getEventId() != -1); // Get notifications from metastore NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null); @@ -1241,7 +1246,10 @@ public class TestDbNotificationListener { rqst.setTableName(tblName); rqst.setPartitionVals(partCol1Vals); // Event 3 -msClient.fireListenerEvent(rqst); +FireEventResponse response = msClient.fireListenerEvent(rqst); +assertTrue("Event id must be set in the fireEvent response", response.isSetEventId()); +Assert.assertNotNull(response.getEventId()); +Assert.assertTrue(response.getEventId() != -1); // Get notifications from metastore NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null); diff --git a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventResponse.java b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventResponse.java index 9125d86..1b48ecb 100644 --- a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventResponse.java +++ 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventResponse.java @@ -38,6 +38,7 @@ import org.slf4j.LoggerFactory; @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class FireEventResponse implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("FireEventResponse"); + private static final org.apache.thrift.protocol.TField EVENT_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("eventId", org.apache.thrift.protocol.TType.I64, (short)1); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { @@ -45,10 +46,11 @@ import org.slf4j.LoggerFa
[hive] branch branch-3 updated: HIVE-21932: IndexOutOfRangeException in FileChksumIterator (Vihang Karajgaonkar, reviewed by Anishek Agarwal)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new a665e9a HIVE-21932: IndexOutOfRangeException in FileChksumIterator (Vihang Karajgaonkar, reviewed by Anishek Agarwal) a665e9a is described below commit a665e9a34770790f49bfa3d9430eba0612c5914e Author: Vihang Karajgaonkar AuthorDate: Fri Jun 28 10:44:30 2019 -0700 HIVE-21932: IndexOutOfRangeException in FileChksumIterator (Vihang Karajgaonkar, reviewed by Anishek Agarwal) --- .../java/org/apache/hive/hcatalog/listener/DbNotificationListener.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java index e5da5d6..348c728 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java @@ -448,7 +448,7 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener public String next() { String result; try { -result = ReplChangeManager.encodeFileUri(files.get(i), chksums != null ? chksums.get(i) : null, +result = ReplChangeManager.encodeFileUri(files.get(i), (chksums != null && !chksums.isEmpty()) ? chksums.get(i) : null, subDirs != null ? subDirs.get(i) : null); } catch (IOException e) { // File operations failed
[hive] branch master updated: HIVE-21932: IndexOutOfRangeException in FileChksumIterator (Vihang Karajgaonkar, reviewed by Anishek Agarwal)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 7488e93 HIVE-21932: IndexOutOfRangeException in FileChksumIterator (Vihang Karajgaonkar, reviewed by Anishek Agarwal) 7488e93 is described below commit 7488e93519d7feaa55be6bf715df21008a7a88f2 Author: Vihang Karajgaonkar AuthorDate: Fri Jun 28 10:44:30 2019 -0700 HIVE-21932: IndexOutOfRangeException in FileChksumIterator (Vihang Karajgaonkar, reviewed by Anishek Agarwal) --- .../java/org/apache/hive/hcatalog/listener/DbNotificationListener.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java index e611394..af01178 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java @@ -519,7 +519,7 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener public String next() { String result; try { -result = ReplChangeManager.encodeFileUri(files.get(i), chksums != null ? chksums.get(i) : null, +result = ReplChangeManager.encodeFileUri(files.get(i), (chksums != null && !chksums.isEmpty()) ? chksums.get(i) : null, subDirs != null ? subDirs.get(i) : null); } catch (IOException e) { // File operations failed
[hive] branch master updated: HIVE-21586 : Thrift generated cpp files for metastore do not compile (addendum)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new ab79987 HIVE-21586 : Thrift generated cpp files for metastore do not compile (addendum) ab79987 is described below commit ab799878aff2662342197dd51bee01aa921efd10 Author: Vihang Karajgaonkar AuthorDate: Tue Apr 16 13:11:06 2019 -0700 HIVE-21586 : Thrift generated cpp files for metastore do not compile (addendum) --- .../hive/metastore/api/CommitTxnRequest.java | 4 +- .../src/gen/thrift/gen-php/metastore/Types.php | 376 ++--- .../src/gen/thrift/gen-py/hive_metastore/ttypes.py | 260 +++--- .../src/gen/thrift/gen-rb/hive_metastore_types.rb | 50 +-- .../src/main/thrift/hive_metastore.thrift | 20 +- 5 files changed, 355 insertions(+), 355 deletions(-) diff --git a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java index 6fe23e1..bbefc3d 100644 --- a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java +++ b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java @@ -139,7 +139,7 @@ import org.slf4j.LoggerFactory; new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.WRITE_EVENT_INFOS, new org.apache.thrift.meta_data.FieldMetaData("writeEventInfos", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, -new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT , "WriteEventInfo"; +new 
org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WriteEventInfo.class; tmpMap.put(_Fields.KEY_VALUE, new org.apache.thrift.meta_data.FieldMetaData("keyValue", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, CommitTxnKeyValue.class))); tmpMap.put(_Fields.REPL_LAST_ID_INFO, new org.apache.thrift.meta_data.FieldMetaData("replLastIdInfo", org.apache.thrift.TFieldRequirementType.OPTIONAL, @@ -171,7 +171,7 @@ import org.slf4j.LoggerFactory; if (other.isSetWriteEventInfos()) { List __this__writeEventInfos = new ArrayList(other.writeEventInfos.size()); for (WriteEventInfo other_element : other.writeEventInfos) { -__this__writeEventInfos.add(other_element); +__this__writeEventInfos.add(new WriteEventInfo(other_element)); } this.writeEventInfos = __this__writeEventInfos; } diff --git a/standalone-metastore/metastore-common/src/gen/thrift/gen-php/metastore/Types.php b/standalone-metastore/metastore-common/src/gen/thrift/gen-php/metastore/Types.php index 18444a0..89af97f 100644 --- a/standalone-metastore/metastore-common/src/gen/thrift/gen-php/metastore/Types.php +++ b/standalone-metastore/metastore-common/src/gen/thrift/gen-php/metastore/Types.php @@ -18239,83 +18239,98 @@ class ReplLastIdInfo { } -class CommitTxnRequest { +class WriteEventInfo { static $_TSPEC; /** * @var int */ - public $txnid = null; + public $writeId = null; /** * @var string */ - public $replPolicy = null; + public $database = null; /** - * @var \metastore\WriteEventInfo[] + * @var string */ - public $writeEventInfos = null; + public $table = null; /** - * @var \metastore\CommitTxnKeyValue + * @var string */ - public $keyValue = null; + public $files = null; /** - * @var \metastore\ReplLastIdInfo + * @var string */ - public $replLastIdInfo = null; + public $partition = null; + /** + * @var string + */ + public $tableObj = null; + /** + * @var string + */ + public $partitionObj = 
null; public function __construct($vals=null) { if (!isset(self::$_TSPEC)) { self::$_TSPEC = array( 1 => array( - 'var' => 'txnid', + 'var' => 'writeId', 'type' => TType::I64, ), 2 => array( - 'var' => 'replPolicy', + 'var' => 'database', 'type' => TType::STRING, ), 3 => array( - 'var' => 'writeEventInfos', - 'type' => TType::LST, - 'etype' => TType::STRUCT, - 'elem' => array( -'type' => TType::STRUCT, -'class' => '\metastore\WriteEventI
[hive] branch master updated: HIVE-21396 : TestCliDriver#vector_groupby_reduce is flaky - rounding error (Vihang Karajgaonkar reviewed by Laszlo Bodor)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 470e966 HIVE-21396 : TestCliDriver#vector_groupby_reduce is flaky - rounding error (Vihang Karajgaonkar reviewed by Laszlo Bodor) 470e966 is described below commit 470e9662da2aaec68cd7b9ea0e5a555111f1 Author: Vihang Karajgaonkar AuthorDate: Thu Mar 28 11:05:52 2019 -0700 HIVE-21396 : TestCliDriver#vector_groupby_reduce is flaky - rounding error (Vihang Karajgaonkar reviewed by Laszlo Bodor) --- .../src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java | 1 + 1 file changed, 1 insertion(+) diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java index ed8ae54..6f04602 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java @@ -65,6 +65,7 @@ public class CliConfigs { excludeQuery("udaf_corr.q"); // disabled in HIVE-20741 excludeQuery("udaf_histogram_numeric.q"); // disabled in HIVE-20715 excludeQuery("stat_estimate_related_col.q"); // disabled in HIVE-20727 +excludeQuery("vector_groupby_reduce.q"); // Disabled in HIVE-21396 setResultsDir("ql/src/test/results/clientpositive"); setLogDir("itests/qtest/target/qfile-results/clientpositive");
[hive] branch branch-2 updated: HIVE-21484 : Metastore API getVersion() should return real version (Vihang Karajgaonkar, reviewed by Naveen Gangam and Peter Vary)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-2 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-2 by this push: new fd2f7c8 HIVE-21484 : Metastore API getVersion() should return real version (Vihang Karajgaonkar, reviewed by Naveen Gangam and Peter Vary) fd2f7c8 is described below commit fd2f7c85c84ad1f0a955325d886286d6eb515f16 Author: Vihang Karajgaonkar AuthorDate: Fri Mar 29 12:47:05 2019 -0700 HIVE-21484 : Metastore API getVersion() should return real version (Vihang Karajgaonkar, reviewed by Naveen Gangam and Peter Vary) --- .../java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java | 6 ++ .../src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java | 6 -- .../java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java | 5 + .../java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java| 7 +++ 4 files changed, 22 insertions(+), 2 deletions(-) diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java index 8ba1b9a..7e919cc 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java @@ -35,6 +35,7 @@ import java.util.Set; import junit.framework.TestCase; +import org.apache.hive.common.util.HiveVersionInfo; import org.datanucleus.api.jdo.JDOPersistenceManager; import org.datanucleus.api.jdo.JDOPersistenceManagerFactory; import org.slf4j.Logger; @@ -3265,6 +3266,11 @@ public abstract class TestHiveMetaStore extends TestCase { silentDropDatabase(dbName); } + @Test + public void testVersion() throws TException { +assertEquals(HiveVersionInfo.getVersion(), client.getServerVersion()); + } + private void checkDbOwnerType(String dbName, String ownerName, PrincipalType 
ownerType) throws NoSuchObjectException, MetaException, TException { Database db = client.getDatabase(dbName); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 813b8aa..454b940 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -132,6 +132,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.HiveStringUtils; +import org.apache.hive.common.util.HiveVersionInfo; import org.apache.hive.common.util.ShutdownHookManager; import org.apache.thrift.TException; import org.apache.thrift.TProcessor; @@ -3988,8 +3989,9 @@ public class HiveMetaStore extends ThriftHiveMetastore { @Override public String getVersion() throws TException { - endFunction(startFunction("getVersion"), true, null); - return "3.0"; + String version = HiveVersionInfo.getVersion(); + endFunction(startFunction("getVersion"), version != null, null); + return version; } @Override diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 83c2860..6dc198f 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -2567,4 +2567,9 @@ public class HiveMetaStoreClient implements IMetaStoreClient { CacheFileMetadataResult result = client.cache_file_metadata(req); return result.isIsSupported(); } + + @Override + public String getServerVersion() throws TException { +return client.getVersion(); + } } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java index a28c510..fb17187 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java @@ -1704,4 +1704,11 @@ public interface IMetaStoreClient { void addForeignKey(List<SQLForeignKey> foreignKeyCols) throws MetaException, NoSuchObjectException, TException; + + /** + * Gets the version string of the metastore server which this client is connected to + * + * @return String representation of the version number of Metastore server (eg: 3.1.0-SNAPSHOT) + */ + String getServerVersion() throws TException; }
[hive] branch branch-3 updated: HIVE-21484 : Metastore API getVersion() should return real version (Vihang Karajgaonkar, reviewed by Naveen Gangam and Peter Vary)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new a91e6ec HIVE-21484 : Metastore API getVersion() should return real version (Vihang Karajgaonkar, reviewed by Naveen Gangam and Peter Vary) a91e6ec is described below commit a91e6ec6a2ea0475fec85e4b3e1abac570d2c602 Author: Vihang Karajgaonkar AuthorDate: Fri Mar 29 10:48:30 2019 -0700 HIVE-21484 : Metastore API getVersion() should return real version (Vihang Karajgaonkar, reviewed by Naveen Gangam and Peter Vary) --- .../main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java | 5 +++-- .../java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java | 5 + .../java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java | 6 ++ .../apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java | 5 + .../java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java| 6 ++ 5 files changed, 25 insertions(+), 2 deletions(-) diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 40affef..d7c8050 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -4989,8 +4989,9 @@ public class HiveMetaStore extends ThriftHiveMetastore { @Override public String getVersion() throws TException { - endFunction(startFunction("getVersion"), true, null); - return "3.0"; + String version = MetastoreVersionInfo.getVersion(); + endFunction(startFunction("getVersion"), version != null, null); + return version; } @Override diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 573..fdfcaea 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -3306,4 +3306,9 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable { req.setMaxCreateTime(maxCreateTime); return client.get_runtime_stats(req); } + + @Override + public String getServerVersion() throws TException { +return client.getVersion(); + } } diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java index 9661beb..fe507b4 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java @@ -3696,4 +3696,10 @@ public interface IMetaStoreClient { /** Reads runtime statistics. 
*/ List getRuntimeStats(int maxWeight, int maxCreateTime) throws TException; + /** + * Gets the version string of the metastore server which this client is connected to + * + * @return String representation of the version number of Metastore server (eg: 3.1.0-SNAPSHOT) + */ + String getServerVersion() throws TException; } diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java index d91f737..1da20c1 100644 --- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java +++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java @@ -3421,4 +3421,9 @@ public class HiveMetaStoreClientPreCatalog implements IMetaStoreClient, AutoClos public List getRuntimeStats(int maxWeight, int maxCreateTime) throws TException { throw new UnsupportedOperationException(); } + + @Override + public String getServerVersion() throws TException { +return client.getVersion(); + } } diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java index 6f52a52..542b742 100644 --- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java +++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java @@ -46,6 +46,7 @@ import org.apache.hadoop.hive.metastore.client.builder.TableBuilder; import org.apache.hadoop.hive.metasto
[hive] branch master updated: HIVE-21484 : Metastore API getVersion() should return real version (Vihang Karajgaonkar reviewed by Naveen Gangam, Peter Vary)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 23ab7f2 HIVE-21484 : Metastore API getVersion() should return real version (Vihang Karajgaonkar reviewed by Naveen Gangam, Peter Vary) 23ab7f2 is described below commit 23ab7f2d5cc6fc7c7975235fcc7f77cde47e381f Author: Vihang Karajgaonkar AuthorDate: Wed Mar 20 17:16:59 2019 -0700 HIVE-21484 : Metastore API getVersion() should return real version (Vihang Karajgaonkar reviewed by Naveen Gangam, Peter Vary) --- .../java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java | 5 + .../java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java| 7 +++ .../main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java | 5 +++-- .../hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java | 5 + .../java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java | 6 ++ 5 files changed, 26 insertions(+), 2 deletions(-) diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index fcd0d44..6357d06 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -3839,4 +3839,9 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable { public void setHadoopJobid(String jobId, long cqId) throws MetaException, TException { client.set_hadoop_jobid(jobId, cqId); } + + @Override + public String getServerVersion() throws TException { +return client.getVersion(); + } } diff --git 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java index f67761e..8402ba5 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java @@ -3948,4 +3948,11 @@ public interface IMetaStoreClient { * @throws TException */ void setHadoopJobid(String jobId, long cqId) throws MetaException, TException; + + /** + * Gets the version string of the metastore server which this client is connected to + * + * @return String representation of the version number of Metastore server (eg: 3.1.0-SNAPSHOT) + */ + String getServerVersion() throws TException; } diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 510be01..854c85f 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -5154,8 +5154,9 @@ public class HiveMetaStore extends ThriftHiveMetastore { @Override public String getVersion() throws TException { - endFunction(startFunction("getVersion"), true, null); - return "3.0"; + String version = MetastoreVersionInfo.getVersion(); + endFunction(startFunction("getVersion"), version != null, null); + return version; } @Override diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java index 481abbc..e1450a5 
100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java @@ -3644,4 +3644,9 @@ public class HiveMetaStoreClientPreCatalog implements IMetaStoreClient, AutoClos public void setHadoopJobid(String jobId, long cqId) throws MetaException, TException { client.set_hadoop_jobid(jobId, cqId); } + + @Override + public String getServerVersion() throws TException { +return client.getVersion(); + } } diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java index 77e0c98..0c4c84c 100644 --- a/standalone-metas
[hive] branch master updated: HIVE-21320 : get_fields() and get_tables_by_type() are not protected by HMS server access control (Na Li, reviewed by Peter Vary)
This is an automated email from the ASF dual-hosted git repository. vihangk1 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 474a19d HIVE-21320 : get_fields() and get_tables_by_type() are not protected by HMS server access control (Na Li, reviewed by Peter Vary) 474a19d is described below commit 474a19df3922fc9929302ddb0bbf34a2c28c0216 Author: Vihang Karajgaonkar AuthorDate: Wed Feb 27 10:31:47 2019 -0800 HIVE-21320 : get_fields() and get_tables_by_type() are not protected by HMS server access control (Na Li, reviewed by Peter Vary) --- .../hadoop/hive/metastore/HiveMetaStore.java | 31 +++- .../hadoop/hive/metastore/TestFilterHooks.java | 1 + .../hive/metastore/TestHmsServerAuthorization.java | 193 + 3 files changed, 222 insertions(+), 3 deletions(-) diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index c0ba867..41f399b 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -1597,7 +1597,8 @@ public class HiveMetaStore extends ThriftHiveMetastore { ConfVars.BATCH_RETRIEVE_MAX); // First pass will drop the materialized views -List materializedViewNames = get_tables_by_type(name, ".*", TableType.MATERIALIZED_VIEW.toString()); +List materializedViewNames = getTablesByTypeCore(catName, name, ".*", +TableType.MATERIALIZED_VIEW.toString()); int startIndex = 0; // retrieve the tables from the metastore in batches to alleviate memory constraints while (startIndex < materializedViewNames.size()) { @@ -5265,7 +5266,7 @@ public class HiveMetaStore extends ThriftHiveMetastore { try { ret = 
getMS().getTables(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern); ret = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, -parsedDbName[CAT_NAME], dbname, ret); +parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); } catch (MetaException e) { ex = e; throw e; @@ -5287,7 +5288,9 @@ public class HiveMetaStore extends ThriftHiveMetastore { Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { -ret = getMS().getTables(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern, TableType.valueOf(tableType)); +ret = getTablesByTypeCore(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern, tableType); +ret = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, +parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); } catch (MetaException e) { ex = e; throw e; @@ -5300,6 +5303,27 @@ public class HiveMetaStore extends ThriftHiveMetastore { return ret; } +private List getTablesByTypeCore(final String catName, final String dbname, +final String pattern, final String tableType) throws MetaException { + startFunction("getTablesByTypeCore", ": catName=" + catName + + ": db=" + dbname + " pat=" + pattern + ",type=" + tableType); + + List ret = null; + Exception ex = null; + try { +ret = getMS().getTables(catName, dbname, pattern, TableType.valueOf(tableType)); + } catch (MetaException e) { +ex = e; +throw e; + } catch (Exception e) { +ex = e; +throw newMetaException(e); + } finally { +endFunction("getTablesByTypeCore", ret != null, ex); + } + return ret; +} + @Override public List get_materialized_views_for_rewriting(final String dbname) throws MetaException { @@ -5367,6 +5391,7 @@ public class HiveMetaStore extends ThriftHiveMetastore { try { try { tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], base_table_name); + firePreEvent(new PreReadTableEvent(tbl, this)); } catch (NoSuchObjectException e) { throw new UnknownTableException(e.getMessage()); } diff --git 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java index 49c7d88..23faa74 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/h
hive git commit: HIVE-21077 : Database and Catalogs should have creation time (addendum) (Vihang Karajgaonkar, reviewed by Naveen Gangam)
Repository: hive Updated Branches: refs/heads/branch-3 e6e11a902 -> 7065c92ef HIVE-21077 : Database and Catalogs should have creation time (addendum) (Vihang Karajgaonkar, reviewed by Naveen Gangam) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7065c92e Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7065c92e Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7065c92e Branch: refs/heads/branch-3 Commit: 7065c92ef2b037f0b95e85362f57c7836b334e47 Parents: e6e11a9 Author: Vihang Karajgaonkar Authored: Tue Jan 29 10:50:52 2019 -0800 Committer: Vihang Karajgaonkar Committed: Tue Jan 29 10:50:52 2019 -0800 -- .../src/main/sql/mssql/hive-schema-3.2.0.mssql.sql | 4 ++-- .../src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/7065c92e/standalone-metastore/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql -- diff --git a/standalone-metastore/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql b/standalone-metastore/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql index b427f00..df0b0da 100644 --- a/standalone-metastore/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql +++ b/standalone-metastore/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql @@ -286,7 +286,7 @@ CREATE TABLE DBS OWNER_NAME nvarchar(128) NULL, OWNER_TYPE nvarchar(10) NULL, CTLG_NAME nvarchar(256), -CREATE_TIME BIGINT +CREATE_TIME INT ); ALTER TABLE DBS ADD CONSTRAINT DBS_PK PRIMARY KEY (DB_ID); @@ -699,7 +699,7 @@ CREATE TABLE CTLGS ( "NAME" nvarchar(256), "DESC" nvarchar(4000), LOCATION_URI nvarchar(4000) not null, - CREATE_TIME BIGINT + CREATE_TIME INT ); CREATE UNIQUE INDEX UNIQUE_CTLG ON CTLGS ("NAME"); http://git-wip-us.apache.org/repos/asf/hive/blob/7065c92e/standalone-metastore/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql -- diff --git a/standalone-metastore/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql 
b/standalone-metastore/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql index 47a585c..a5bcf78 100644 --- a/standalone-metastore/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql +++ b/standalone-metastore/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql @@ -17,8 +17,8 @@ ALTER TABLE TXN_WRITE_NOTIFICATION_LOG ADD CONSTRAINT TXN_WRITE_NOTIFICATION_LOG INSERT INTO SEQUENCE_TABLE (SEQUENCE_NAME, NEXT_VAL) VALUES ('org.apache.hadoop.hive.metastore.model.MTxnWriteNotificationLog', 1); -- HIVE-21077 -ALTER TABLE DBS ADD CREATE_TIME BIGINT; -ALTER TABLE CTLGS ADD CREATE_TIME BIGINT; +ALTER TABLE DBS ADD CREATE_TIME INT; +ALTER TABLE CTLGS ADD CREATE_TIME INT; -- These lines need to be last. Insert any changes above. UPDATE VERSION SET SCHEMA_VERSION='3.2.0', VERSION_COMMENT='Hive release version 3.2.0' where VER_ID=1;
hive git commit: HIVE-21077 : Database and Catalogs should have creation time (addendum) (Vihang Karajgaonkar, reviewed by Naveen Gangam)
Repository: hive Updated Branches: refs/heads/master 71dfd1d11 -> 61d8a06b9 HIVE-21077 : Database and Catalogs should have creation time (addendum) (Vihang Karajgaonkar, reviewed by Naveen Gangam) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/61d8a06b Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/61d8a06b Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/61d8a06b Branch: refs/heads/master Commit: 61d8a06b97e7df810d0aea399d1a9dcecb58bac1 Parents: 71dfd1d Author: Vihang Karajgaonkar Authored: Mon Jan 28 11:14:05 2019 -0800 Committer: Vihang Karajgaonkar Committed: Tue Jan 29 10:41:24 2019 -0800 -- .../src/main/sql/mssql/hive-schema-3.2.0.mssql.sql | 4 ++-- .../src/main/sql/mssql/hive-schema-4.0.0.mssql.sql | 4 ++-- .../src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/61d8a06b/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql -- diff --git a/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql b/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql index e120128..cd04b4c 100644 --- a/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql +++ b/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-3.2.0.mssql.sql @@ -286,7 +286,7 @@ CREATE TABLE DBS OWNER_NAME nvarchar(128) NULL, OWNER_TYPE nvarchar(10) NULL, CTLG_NAME nvarchar(256), -CREATE_TIME BIGINT +CREATE_TIME INT ); ALTER TABLE DBS ADD CONSTRAINT DBS_PK PRIMARY KEY (DB_ID); @@ -699,7 +699,7 @@ CREATE TABLE CTLGS ( "NAME" nvarchar(256), "DESC" nvarchar(4000), LOCATION_URI nvarchar(4000) not null, - CREATE_TIME BIGINT + CREATE_TIME INT ); CREATE UNIQUE INDEX UNIQUE_CTLG ON CTLGS ("NAME"); 
http://git-wip-us.apache.org/repos/asf/hive/blob/61d8a06b/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-4.0.0.mssql.sql -- diff --git a/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-4.0.0.mssql.sql b/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-4.0.0.mssql.sql index 895bf01..383d3bc 100644 --- a/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-4.0.0.mssql.sql +++ b/standalone-metastore/metastore-server/src/main/sql/mssql/hive-schema-4.0.0.mssql.sql @@ -286,7 +286,7 @@ CREATE TABLE DBS OWNER_NAME nvarchar(128) NULL, OWNER_TYPE nvarchar(10) NULL, CTLG_NAME nvarchar(256), -CREATE_TIME BIGINT +CREATE_TIME INT ); ALTER TABLE DBS ADD CONSTRAINT DBS_PK PRIMARY KEY (DB_ID); @@ -700,7 +700,7 @@ CREATE TABLE CTLGS ( "NAME" nvarchar(256), "DESC" nvarchar(4000), LOCATION_URI nvarchar(4000) not null, - CREATE_TIME BIGINT + CREATE_TIME INT ); CREATE UNIQUE INDEX UNIQUE_CTLG ON CTLGS ("NAME"); http://git-wip-us.apache.org/repos/asf/hive/blob/61d8a06b/standalone-metastore/metastore-server/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql -- diff --git a/standalone-metastore/metastore-server/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql b/standalone-metastore/metastore-server/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql index 803bf5e..fd78419 100644 --- a/standalone-metastore/metastore-server/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql +++ b/standalone-metastore/metastore-server/src/main/sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql @@ -21,8 +21,8 @@ INSERT INTO SEQUENCE_TABLE (SEQUENCE_NAME, NEXT_VAL) VALUES ('org.apache.hadoop. ALTER TABLE "SERDE_PARAMS" ALTER COLUMN "PARAM_VALUE" nvarchar(MAX); -- HIVE-21077 -ALTER TABLE DBS ADD CREATE_TIME BIGINT; -ALTER TABLE CTLGS ADD CREATE_TIME BIGINT; +ALTER TABLE DBS ADD CREATE_TIME INT; +ALTER TABLE CTLGS ADD CREATE_TIME INT; -- These lines need to be last. Insert any changes above. 
UPDATE VERSION SET SCHEMA_VERSION='3.2.0', VERSION_COMMENT='Hive release version 3.2.0' where VER_ID=1;
hive git commit: HIVE-21083 : Remove the requirement to specify the truststore location when TLS to the database is turned on (Morio Ramdenbourg, reviewed by Karthik Manamcheri and Vihang Karajgaonkar)
Repository: hive Updated Branches: refs/heads/master ce654250b -> 698206a29 HIVE-21083 : Remove the requirement to specify the truststore location when TLS to the database is turned on (Morio Ramdenbourg, reviewed by Karthik Manamcheri and Vihang Karajgaonkar) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/698206a2 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/698206a2 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/698206a2 Branch: refs/heads/master Commit: 698206a293cadff81bfe5d9874b3a1ea3accf3bb Parents: ce65425 Author: Morio Ramdenbourg Authored: Mon Jan 28 10:55:49 2019 -0800 Committer: Vihang Karajgaonkar Committed: Mon Jan 28 11:04:52 2019 -0800 -- .../hive/metastore/conf/MetastoreConf.java | 15 +++--- .../hadoop/hive/metastore/ObjectStore.java | 48 + .../hadoop/hive/metastore/TestObjectStore.java | 54 3 files changed, 65 insertions(+), 52 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/698206a2/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java -- diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index 75f0c0a..313f87b 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -461,18 +461,21 @@ public class MetastoreConf { // If DBACCESS_USE_SSL is false, then all other DBACCESS_SSL_* properties will be ignored DBACCESS_SSL_TRUSTSTORE_PASSWORD("metastore.dbaccess.ssl.truststore.password", "hive.metastore.dbaccess.ssl.truststore.password", "", "Password for the Java truststore file that is used when encrypting the connection to the database 
store. \n" -+ "This directly maps to the javax.net.ssl.trustStorePassword Java system property. \n" -+ "While Java does allow an empty truststore password, we highly recommend against this. \n" -+ "An empty password can compromise the integrity of the truststore file."), ++ "metastore.dbaccess.ssl.use.SSL must be set to true for this property to take effect. \n" ++ "This directly maps to the javax.net.ssl.trustStorePassword Java system property. Defaults to jssecacerts, if it exists, otherwise uses cacerts. \n" ++ "It is recommended to specify the password using a credential provider so as to not expose it to discovery by other users. \n" ++ "One way to do this is by using the Hadoop CredentialProvider API and provisioning credentials for this property. Refer to the Hadoop CredentialProvider API Guide for more details."), DBACCESS_SSL_TRUSTSTORE_PATH("metastore.dbaccess.ssl.truststore.path", "hive.metastore.dbaccess.ssl.truststore.path", "", "Location on disk of the Java truststore file to use when encrypting the connection to the database store. \n" -+ "This directly maps to the javax.net.ssl.trustStore Java system property. \n" -+ "This file consists of a collection of certificates trusted by the metastore server.\n"), ++ "This file consists of a collection of certificates trusted by the metastore server. \n" ++ "metastore.dbaccess.ssl.use.SSL must be set to true for this property to take effect. \n" ++ "This directly maps to the javax.net.ssl.trustStore Java system property. Defaults to the default Java truststore file. \n"), DBACCESS_SSL_TRUSTSTORE_TYPE("metastore.dbaccess.ssl.truststore.type", "hive.metastore.dbaccess.ssl.truststore.type", "jks", new StringSetValidator("jceks", "jks", "dks", "pkcs11", "pkcs12"), "File type for the Java truststore file that is used when encrypting the connection to the database store. \n" ++ "metastore.dbaccess.ssl.use.SSL must be set to true for this property to take effect. 
\n" + "This directly maps to the javax.net.ssl.trustStoreType Java system property. \n" -+ "Types jceks, jks, dks, pkcs11, and pkcs12 can be read from Java 8 and beyond. We default to jks. \n"), ++ "Types jceks, jks, dks, pkcs11, and pkcs12 can be read from Java 8 and beyond. Defaults to jks."), DBACCESS_USE_SSL("metastore.dbaccess.ssl.use.SSL", "hive.metastore.dbaccess.ssl.use.SSL", false, "Set this to true to use SSL encryption to the database store."),
hive git commit: HIVE-20776 : Run HMS filterHooks on server-side in addition to client-side (Na Li reviewed by Karthik, Sergio, Morio, Adam and Vihang Karajgaonkar)
Repository: hive Updated Branches: refs/heads/master d1460174d -> dfd63d979 HIVE-20776 : Run HMS filterHooks on server-side in addition to client-side (Na Li reviewed by Karthik, Sergio, Morio, Adam and Vihang Karajgaonkar) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/dfd63d97 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/dfd63d97 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/dfd63d97 Branch: refs/heads/master Commit: dfd63d97902b359e1643e955a4d070ac983debd5 Parents: d146017 Author: Na Li Authored: Tue Jan 22 10:43:25 2019 -0800 Committer: Vihang Karajgaonkar Committed: Tue Jan 22 11:08:02 2019 -0800 -- .../hive/metastore/HiveMetaStoreClient.java | 141 --- .../hive/metastore/conf/MetastoreConf.java | 8 +- .../hive/metastore/utils/FilterUtils.java | 375 +++ .../hadoop/hive/metastore/HiveMetaStore.java| 168 - .../hadoop/hive/metastore/TestFilterHooks.java | 304 ++- .../metastore/client/TestListPartitions.java| 4 +- 6 files changed, 849 insertions(+), 151 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/dfd63d97/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java -- diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 19bd9ba..30edc56 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -19,7 +19,9 @@ package org.apache.hadoop.hive.metastore; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME; +import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.CAT_NAME; import static 
org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; +import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.parseDbName; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.prependCatalogToDbName; import java.io.IOException; @@ -65,6 +67,7 @@ import org.apache.hadoop.hive.metastore.hooks.URIResolverHook; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge; import org.apache.hadoop.hive.metastore.txn.TxnCommonUtils; +import org.apache.hadoop.hive.metastore.utils.FilterUtils; import org.apache.hadoop.hive.metastore.utils.JavaUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.metastore.utils.ObjectPair; @@ -127,6 +130,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable { private String tokenStrForm; private final boolean localMetaStore; private final MetaStoreFilterHook filterHook; + private final boolean isClientFilterEnabled; private final URIResolverHook uriResolverHook; private final int fileMetadataBatchSize; @@ -164,6 +168,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable { } version = MetastoreConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST) ? 
TEST_VERSION : VERSION; filterHook = loadFilterHooks(); +isClientFilterEnabled = getIfClientFilterEnabled(); uriResolverHook = loadUriResolverHook(); fileMetadataBatchSize = MetastoreConf.getIntVar( conf, ConfVars.BATCH_RETRIEVE_OBJECTS_MAX); @@ -276,6 +281,12 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable { return null; } + private boolean getIfClientFilterEnabled() { +boolean isEnabled = MetastoreConf.getBoolVar(conf, ConfVars.METASTORE_CLIENT_FILTER_ENABLED); +LOG.info("HMS client filtering is " + (isEnabled?"enabled.":"disabled.")); + +return isEnabled; + } private void resolveUris() throws MetaException { String thriftUris = MetastoreConf.getVar(conf, ConfVars.THRIFT_URIS); String serviceDiscoveryMode = MetastoreConf.getVar(conf, ConfVars.THRIFT_SERVICE_DISCOVERY_MODE); @@ -731,13 +742,15 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable { @Override public Catalog getCatalog(String catName) throws TException { GetCatalogResponse rsp = client.get_catalog(new GetCatalogRequest(catName)); -return rsp == null ? null : filterHook.filterCatalog(rsp.getCatalog()); +return rsp == null ? +null : FilterUtils.filterCatalogIfEnabled(isClientFilterEnabled, filterHook, rsp.getCatalog()); }
hive git commit: HIVE-21077 : Database and Catalogs should have creation time (Vihang Karajgaonkar reviewed by Karthik Manamcheri and Bharath Krishna)
Repository: hive Updated Branches: refs/heads/branch-3 4c73511f3 -> 1685dc311 HIVE-21077 : Database and Catalogs should have creation time (Vihang Karajgaonkar reviewed by Karthik Manamcheri and Bharath Krishna) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1685dc31 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1685dc31 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1685dc31 Branch: refs/heads/branch-3 Commit: 1685dc3113fdb731d85e6c06a873b1ad8f1ff36d Parents: 4c73511 Author: Vihang Karajgaonkar Authored: Thu Jan 3 10:56:05 2019 -0800 Committer: Vihang Karajgaonkar Committed: Tue Jan 22 10:29:34 2019 -0800 -- .../gen/thrift/gen-cpp/hive_metastore_types.cpp | 44 .../gen/thrift/gen-cpp/hive_metastore_types.h | 24 +++- .../hadoop/hive/metastore/api/Catalog.java | 111 ++- .../hadoop/hive/metastore/api/Database.java | 111 ++- .../src/gen/thrift/gen-php/metastore/Types.php | 46 .../gen/thrift/gen-py/hive_metastore/ttypes.py | 30 - .../gen/thrift/gen-rb/hive_metastore_types.rb | 8 +- .../hadoop/hive/metastore/HiveMetaStore.java| 18 ++- .../hive/metastore/MetaStoreDirectSql.java | 3 +- .../hadoop/hive/metastore/ObjectStore.java | 4 + .../client/builder/DatabaseBuilder.java | 7 ++ .../hadoop/hive/metastore/model/MCatalog.java | 9 ++ .../hadoop/hive/metastore/model/MDatabase.java | 9 ++ .../src/main/resources/package.jdo | 8 ++ .../main/sql/derby/hive-schema-3.2.0.derby.sql | 6 +- .../sql/derby/upgrade-3.1.0-to-3.2.0.derby.sql | 4 + .../main/sql/mssql/hive-schema-3.2.0.mssql.sql | 6 +- .../sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql | 4 + .../main/sql/mysql/hive-schema-3.2.0.mysql.sql | 2 + .../sql/mysql/upgrade-3.1.0-to-3.2.0.mysql.sql | 4 + .../sql/oracle/hive-schema-3.2.0.oracle.sql | 3 +- .../oracle/upgrade-3.1.0-to-3.2.0.oracle.sql| 4 + .../sql/postgres/hive-schema-3.2.0.postgres.sql | 6 +- .../upgrade-3.1.0-to-3.2.0.postgres.sql | 4 + .../src/main/thrift/hive_metastore.thrift | 10 +- 
.../hive/metastore/cache/TestCachedStore.java | 1 + .../hive/metastore/client/TestCatalogs.java | 2 + .../hive/metastore/client/TestDatabases.java| 29 + .../tools/TestSchemaToolForMetastore.java | 27 +++-- 29 files changed, 499 insertions(+), 45 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/1685dc31/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp -- diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp index 26420dd..b848718 100644 --- a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp +++ b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp @@ -4456,6 +4456,11 @@ void Catalog::__set_locationUri(const std::string& val) { this->locationUri = val; } +void Catalog::__set_createTime(const int32_t val) { + this->createTime = val; +__isset.createTime = true; +} + uint32_t Catalog::read(::apache::thrift::protocol::TProtocol* iprot) { apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); @@ -4501,6 +4506,14 @@ uint32_t Catalog::read(::apache::thrift::protocol::TProtocol* iprot) { xfer += iprot->skip(ftype); } break; + case 4: +if (ftype == ::apache::thrift::protocol::T_I32) { + xfer += iprot->readI32(this->createTime); + this->__isset.createTime = true; +} else { + xfer += iprot->skip(ftype); +} +break; default: xfer += iprot->skip(ftype); break; @@ -4531,6 +4544,11 @@ uint32_t Catalog::write(::apache::thrift::protocol::TProtocol* oprot) const { xfer += oprot->writeString(this->locationUri); xfer += oprot->writeFieldEnd(); + if (this->__isset.createTime) { +xfer += oprot->writeFieldBegin("createTime", ::apache::thrift::protocol::T_I32, 4); +xfer += oprot->writeI32(this->createTime); +xfer += oprot->writeFieldEnd(); + } xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); return xfer; @@ -4541,6 +4559,7 @@ void swap(Catalog , Catalog ) { swap(a.name, b.name); 
swap(a.description, b.description); swap(a.locationUri, b.locationUri); + swap(a.createTime, b.createTime); swap(a.__isset, b.__isset); } @@ -4548,12 +4567,14 @@ Catalog::Catalog(const Catalog& other130) { name = other130.name; description = other130.description; locationUri = other130.locationUri; + createTime = other130.createTime; __isset = other130.__isset; }
hive git commit: HIVE-21077 : Database and Catalogs should have creation time (Vihang Karajgaonkar reviewed by Karthik Manamcheri and Bharath Krishna)
Repository: hive Updated Branches: refs/heads/master cb74a685c -> d1460174d HIVE-21077 : Database and Catalogs should have creation time (Vihang Karajgaonkar reviewed by Karthik Manamcheri and Bharath Krishna) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d1460174 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d1460174 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d1460174 Branch: refs/heads/master Commit: d1460174d5322bcfb642b489dd250c59770f0551 Parents: cb74a68 Author: Vihang Karajgaonkar Authored: Thu Jan 3 10:56:05 2019 -0800 Committer: Vihang Karajgaonkar Committed: Tue Jan 22 10:17:53 2019 -0800 -- .../hadoop/hive/metastore/api/Catalog.java | 111 ++- .../hadoop/hive/metastore/api/Database.java | 111 ++- .../src/gen/thrift/gen-php/metastore/Types.php | 46 .../gen/thrift/gen-py/hive_metastore/ttypes.py | 30 - .../gen/thrift/gen-rb/hive_metastore_types.rb | 8 +- .../src/main/thrift/hive_metastore.thrift | 10 +- .../hadoop/hive/metastore/HiveMetaStore.java| 18 ++- .../hive/metastore/MetaStoreDirectSql.java | 3 +- .../hadoop/hive/metastore/ObjectStore.java | 4 + .../client/builder/DatabaseBuilder.java | 7 ++ .../hadoop/hive/metastore/model/MCatalog.java | 9 ++ .../hadoop/hive/metastore/model/MDatabase.java | 9 ++ .../src/main/resources/package.jdo | 8 ++ .../main/sql/derby/hive-schema-3.2.0.derby.sql | 6 +- .../main/sql/derby/hive-schema-4.0.0.derby.sql | 6 +- .../sql/derby/upgrade-3.1.0-to-3.2.0.derby.sql | 4 + .../sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql | 1 - .../main/sql/mssql/hive-schema-3.2.0.mssql.sql | 6 +- .../main/sql/mssql/hive-schema-4.0.0.mssql.sql | 6 +- .../sql/mssql/upgrade-3.1.0-to-3.2.0.mssql.sql | 4 + .../main/sql/mysql/hive-schema-3.2.0.mysql.sql | 2 + .../main/sql/mysql/hive-schema-4.0.0.mysql.sql | 2 + .../sql/mysql/upgrade-3.1.0-to-3.2.0.mysql.sql | 4 + .../sql/oracle/hive-schema-3.2.0.oracle.sql | 4 +- .../sql/oracle/hive-schema-4.0.0.oracle.sql | 4 +- 
.../oracle/upgrade-3.1.0-to-3.2.0.oracle.sql| 3 + .../sql/postgres/hive-schema-3.2.0.postgres.sql | 6 +- .../sql/postgres/hive-schema-4.0.0.postgres.sql | 6 +- .../upgrade-3.1.0-to-3.2.0.postgres.sql | 4 + .../hive/metastore/cache/TestCachedStore.java | 1 + .../hive/metastore/client/TestCatalogs.java | 2 + .../hive/metastore/client/TestDatabases.java| 29 + .../tools/TestSchemaToolForMetastore.java | 27 +++-- 33 files changed, 452 insertions(+), 49 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/d1460174/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java -- diff --git a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java index 3eb4dbd..3e968dc 100644 --- a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java +++ b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java @@ -41,6 +41,7 @@ import org.slf4j.LoggerFactory; private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField DESCRIPTION_FIELD_DESC = new org.apache.thrift.protocol.TField("description", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField LOCATION_URI_FIELD_DESC = new org.apache.thrift.protocol.TField("locationUri", org.apache.thrift.protocol.TType.STRING, (short)3); + private static final org.apache.thrift.protocol.TField CREATE_TIME_FIELD_DESC = new org.apache.thrift.protocol.TField("createTime", org.apache.thrift.protocol.TType.I32, (short)4); private static final Map, SchemeFactory> schemes = new 
HashMap, SchemeFactory>(); static { @@ -51,12 +52,14 @@ import org.slf4j.LoggerFactory; private String name; // required private String description; // optional private String locationUri; // required + private int createTime; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { NAME((short)1, "name"), DESCRIPTION((short)2, "description"), -
hive git commit: Revert "HIVE-21077 : Database and Catalogs should have creation time (Vihang Karajgaonkar reviewed by Karthik Manamcheri and Bharath Krishna)"
Repository: hive Updated Branches: refs/heads/master 4d8320d46 -> 4d5d80609 Revert "HIVE-21077 : Database and Catalogs should have creation time (Vihang Karajgaonkar reviewed by Karthik Manamcheri and Bharath Krishna)" This reverts commit 4d8320d467d30e4e125a3304407415885670f9c7. Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4d5d8060 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4d5d8060 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4d5d8060 Branch: refs/heads/master Commit: 4d5d80609608521db3d12916f9900b88a94aff94 Parents: 4d8320d Author: Vihang Karajgaonkar Authored: Wed Jan 16 12:27:56 2019 -0800 Committer: Vihang Karajgaonkar Committed: Wed Jan 16 12:27:56 2019 -0800 -- .../hadoop/hive/metastore/api/Catalog.java | 111 +-- .../hadoop/hive/metastore/api/Database.java | 111 +-- .../src/gen/thrift/gen-php/metastore/Types.php | 46 .../gen/thrift/gen-py/hive_metastore/ttypes.py | 30 + .../gen/thrift/gen-rb/hive_metastore_types.rb | 8 +- .../src/main/thrift/hive_metastore.thrift | 10 +- .../hadoop/hive/metastore/HiveMetaStore.java| 18 +-- .../hive/metastore/MetaStoreDirectSql.java | 3 +- .../hadoop/hive/metastore/ObjectStore.java | 4 - .../client/builder/DatabaseBuilder.java | 7 -- .../hadoop/hive/metastore/model/MCatalog.java | 9 -- .../hadoop/hive/metastore/model/MDatabase.java | 9 -- .../src/main/resources/package.jdo | 8 -- .../main/sql/derby/hive-schema-4.0.0.derby.sql | 6 +- .../sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql | 4 +- .../main/sql/mssql/hive-schema-4.0.0.mssql.sql | 6 +- .../sql/mssql/upgrade-3.2.0-to-4.0.0.mssql.sql | 3 - .../main/sql/mysql/hive-schema-4.0.0.mysql.sql | 2 - .../sql/mysql/upgrade-3.2.0-to-4.0.0.mysql.sql | 3 - .../sql/oracle/hive-schema-4.0.0.oracle.sql | 4 +- .../oracle/upgrade-3.2.0-to-4.0.0.oracle.sql| 3 - .../sql/postgres/hive-schema-4.0.0.postgres.sql | 6 +- .../upgrade-3.2.0-to-4.0.0.postgres.sql | 3 - 
.../hive/metastore/cache/TestCachedStore.java | 1 - .../hive/metastore/client/TestCatalogs.java | 2 - .../hive/metastore/client/TestDatabases.java| 29 - .../tools/TestSchemaToolForMetastore.java | 27 ++--- 27 files changed, 42 insertions(+), 431 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/4d5d8060/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java -- diff --git a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java index 3e968dc..3eb4dbd 100644 --- a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java +++ b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java @@ -41,7 +41,6 @@ import org.slf4j.LoggerFactory; private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField DESCRIPTION_FIELD_DESC = new org.apache.thrift.protocol.TField("description", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField LOCATION_URI_FIELD_DESC = new org.apache.thrift.protocol.TField("locationUri", org.apache.thrift.protocol.TType.STRING, (short)3); - private static final org.apache.thrift.protocol.TField CREATE_TIME_FIELD_DESC = new org.apache.thrift.protocol.TField("createTime", org.apache.thrift.protocol.TType.I32, (short)4); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { @@ -52,14 +51,12 @@ import org.slf4j.LoggerFactory; private String name; // required private String description; // optional private String locationUri; // required - 
private int createTime; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { NAME((short)1, "name"), DESCRIPTION((short)2, "description"), -LOCATION_URI((short)3, "locationUri"), -CREATE_TIME((short)4, "createTime"); +LOCATION_URI((short)3, "locationUri"); private static final Map byName = new HashMap(); @@ -80,8 +77,6 @@ import org.slf4j.LoggerFactory; return DESCRIPTION;
hive git commit: HIVE-21077 : Database and Catalogs should have creation time (Vihang Karajgaonkar reviewed by Karthik Manamcheri and Bharath Krishna)
Repository: hive Updated Branches: refs/heads/master 1fff171c6 -> 4d8320d46 HIVE-21077 : Database and Catalogs should have creation time (Vihang Karajgaonkar reviewed by Karthik Manamcheri and Bharath Krishna) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4d8320d4 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4d8320d4 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4d8320d4 Branch: refs/heads/master Commit: 4d8320d467d30e4e125a3304407415885670f9c7 Parents: 1fff171 Author: Vihang Karajgaonkar Authored: Thu Jan 3 10:56:05 2019 -0800 Committer: Vihang Karajgaonkar Committed: Wed Jan 16 10:26:41 2019 -0800 -- .../hadoop/hive/metastore/api/Catalog.java | 111 ++- .../hadoop/hive/metastore/api/Database.java | 111 ++- .../src/gen/thrift/gen-php/metastore/Types.php | 46 .../gen/thrift/gen-py/hive_metastore/ttypes.py | 30 - .../gen/thrift/gen-rb/hive_metastore_types.rb | 8 +- .../src/main/thrift/hive_metastore.thrift | 10 +- .../hadoop/hive/metastore/HiveMetaStore.java| 18 ++- .../hive/metastore/MetaStoreDirectSql.java | 3 +- .../hadoop/hive/metastore/ObjectStore.java | 4 + .../client/builder/DatabaseBuilder.java | 7 ++ .../hadoop/hive/metastore/model/MCatalog.java | 9 ++ .../hadoop/hive/metastore/model/MDatabase.java | 9 ++ .../src/main/resources/package.jdo | 8 ++ .../main/sql/derby/hive-schema-4.0.0.derby.sql | 6 +- .../sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql | 4 +- .../main/sql/mssql/hive-schema-4.0.0.mssql.sql | 6 +- .../sql/mssql/upgrade-3.2.0-to-4.0.0.mssql.sql | 3 + .../main/sql/mysql/hive-schema-4.0.0.mysql.sql | 2 + .../sql/mysql/upgrade-3.2.0-to-4.0.0.mysql.sql | 3 + .../sql/oracle/hive-schema-4.0.0.oracle.sql | 4 +- .../oracle/upgrade-3.2.0-to-4.0.0.oracle.sql| 3 + .../sql/postgres/hive-schema-4.0.0.postgres.sql | 6 +- .../upgrade-3.2.0-to-4.0.0.postgres.sql | 3 + .../hive/metastore/cache/TestCachedStore.java | 1 + .../hive/metastore/client/TestCatalogs.java | 2 + 
.../hive/metastore/client/TestDatabases.java| 29 + .../tools/TestSchemaToolForMetastore.java | 27 +++-- 27 files changed, 431 insertions(+), 42 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/4d8320d4/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java -- diff --git a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java index 3eb4dbd..3e968dc 100644 --- a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java +++ b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Catalog.java @@ -41,6 +41,7 @@ import org.slf4j.LoggerFactory; private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField DESCRIPTION_FIELD_DESC = new org.apache.thrift.protocol.TField("description", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField LOCATION_URI_FIELD_DESC = new org.apache.thrift.protocol.TField("locationUri", org.apache.thrift.protocol.TType.STRING, (short)3); + private static final org.apache.thrift.protocol.TField CREATE_TIME_FIELD_DESC = new org.apache.thrift.protocol.TField("createTime", org.apache.thrift.protocol.TType.I32, (short)4); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { @@ -51,12 +52,14 @@ import org.slf4j.LoggerFactory; private String name; // required private String description; // optional private String locationUri; // required + private int createTime; // optional /** The set of fields this struct contains, along with convenience 
methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { NAME((short)1, "name"), DESCRIPTION((short)2, "description"), -LOCATION_URI((short)3, "locationUri"); +LOCATION_URI((short)3, "locationUri"), +CREATE_TIME((short)4, "createTime"); private static final Map byName = new HashMap(); @@ -77,6 +80,8 @@ import org.slf4j.LoggerFactory; return DESCRIPTION; case 3: // LOCATION_URI return LOCATION_URI; +case
hive git commit: HIVE-21040 : msck does unnecessary file listing at last level of directory tree (Vihang Karajgaonkar, reviewed by Prasanth Jayachandran)
Repository: hive Updated Branches: refs/heads/branch-2 0083145ef -> 0b8cfa7cd HIVE-21040 : msck does unnecessary file listing at last level of directory tree (Vihang Karajgaonkar, reviewed by Prasanth Jayachandran) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0b8cfa7c Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0b8cfa7c Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0b8cfa7c Branch: refs/heads/branch-2 Commit: 0b8cfa7cda02227bdbd3dd2a55fd4a2133b9449b Parents: 0083145 Author: Vihang Karajgaonkar Authored: Mon Dec 17 17:13:56 2018 -0800 Committer: Vihang Karajgaonkar Committed: Wed Jan 2 12:08:23 2019 -0800 -- .../hive/ql/metadata/HiveMetaStoreChecker.java | 16 +-- .../ql/metadata/TestHiveMetaStoreChecker.java | 112 ++- 2 files changed, 120 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/0b8cfa7c/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java index 84c0902..9daf860 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.session.SessionState; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -471,10 +472,13 @@ public class HiveMetaStoreChecker { throws IOException, HiveException, InterruptedException { final Path currentPath = pd.p; final int currentDepth = pd.depth; + if (currentDepth == maxDepth) { +return currentPath; + } FileStatus[] fileStatuses = 
fs.listStatus(currentPath, FileUtils.HIDDEN_FILES_PATH_FILTER); // found no files under a sub-directory under table base path; it is possible that the table // is empty and hence there are no partition sub-directories created under base path - if (fileStatuses.length == 0 && currentDepth > 0 && currentDepth < maxDepth) { + if (fileStatuses.length == 0 && currentDepth > 0) { // since maxDepth is not yet reached, we are missing partition // columns in currentPath if (throwException) { @@ -486,7 +490,7 @@ public class HiveMetaStoreChecker { } else { // found files under currentPath add them to the queue if it is a directory for (FileStatus fileStatus : fileStatuses) { - if (!fileStatus.isDirectory() && currentDepth < maxDepth) { + if (!fileStatus.isDirectory()) { // found a file at depth which is less than number of partition keys if (throwException) { throw new HiveException( @@ -501,9 +505,6 @@ public class HiveMetaStoreChecker { pendingPaths.add(new PathDepthInfo(fileStatus.getPath(), currentDepth + 1)); } } -if (currentDepth == maxDepth) { - return currentPath; -} } return null; } @@ -518,7 +519,8 @@ public class HiveMetaStoreChecker { } } - private void checkPartitionDirs(final ExecutorService executor, + @VisibleForTesting + void checkPartitionDirs(final ExecutorService executor, final Path basePath, final Set result, final FileSystem fs, final int maxDepth) throws HiveException { try { @@ -549,7 +551,7 @@ public class HiveMetaStoreChecker { nextLevel = tempQueue; } } catch (InterruptedException | ExecutionException e) { - LOG.error(e.getMessage()); + LOG.error("Exception received while listing partition directories", e); executor.shutdownNow(); throw new HiveException(e.getCause()); } http://git-wip-us.apache.org/repos/asf/hive/blob/0b8cfa7c/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java -- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java 
b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java index 21bc8ee..b8d7503 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java @@ -17,15 +17,30 @@ */ package org.apache.hadoop.hive.ql.metadata; +import static org.junit.Assert.*; +import static
hive git commit: HIVE-21040 : msck does unnecessary file listing at last level of directory tree (Vihang Karajgaonkar, reviewed by Prasanth Jayachandran)
Repository: hive Updated Branches: refs/heads/branch-3 490041dd3 -> 4c73511f3 HIVE-21040 : msck does unnecessary file listing at last level of directory tree (Vihang Karajgaonkar, reviewed by Prasanth Jayachandran) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4c73511f Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4c73511f Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4c73511f Branch: refs/heads/branch-3 Commit: 4c73511f3f59144fb8cc306117a1bf1f3d6dd071 Parents: 490041d Author: Vihang Karajgaonkar Authored: Mon Dec 17 17:13:56 2018 -0800 Committer: Vihang Karajgaonkar Committed: Wed Jan 2 11:16:18 2019 -0800 -- .../hive/ql/metadata/HiveMetaStoreChecker.java | 18 ++-- .../ql/metadata/TestHiveMetaStoreChecker.java | 106 +++ 2 files changed, 116 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/4c73511f/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java index 598bb2e..9339094 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.session.SessionState; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -470,10 +471,13 @@ public class HiveMetaStoreChecker { throws IOException, HiveException, InterruptedException { final Path currentPath = pd.p; final int currentDepth = pd.depth; + if (currentDepth == partColNames.size()) { +return currentPath; + } FileStatus[] fileStatuses = 
fs.listStatus(currentPath, FileUtils.HIDDEN_FILES_PATH_FILTER); // found no files under a sub-directory under table base path; it is possible that the table // is empty and hence there are no partition sub-directories created under base path - if (fileStatuses.length == 0 && currentDepth > 0 && currentDepth < partColNames.size()) { + if (fileStatuses.length == 0 && currentDepth > 0) { // since maxDepth is not yet reached, we are missing partition // columns in currentPath logOrThrowExceptionWithMsg( @@ -481,12 +485,12 @@ public class HiveMetaStoreChecker { } else { // found files under currentPath add them to the queue if it is a directory for (FileStatus fileStatus : fileStatuses) { - if (!fileStatus.isDirectory() && currentDepth < partColNames.size()) { + if (!fileStatus.isDirectory()) { // found a file at depth which is less than number of partition keys logOrThrowExceptionWithMsg( "MSCK finds a file rather than a directory when it searches for " + fileStatus.getPath().toString()); - } else if (fileStatus.isDirectory() && currentDepth < partColNames.size()) { + } else { // found a sub-directory at a depth less than number of partition keys // validate if the partition directory name matches with the corresponding // partition colName at currentDepth @@ -503,9 +507,6 @@ public class HiveMetaStoreChecker { } } } -if (currentDepth == partColNames.size()) { - return currentPath; -} } return null; } @@ -528,7 +529,8 @@ public class HiveMetaStoreChecker { } } - private void checkPartitionDirs(final ExecutorService executor, + @VisibleForTesting + void checkPartitionDirs(final ExecutorService executor, final Path basePath, final Set result, final FileSystem fs, final List partColNames) throws HiveException { try { @@ -559,7 +561,7 @@ public class HiveMetaStoreChecker { nextLevel = tempQueue; } } catch (InterruptedException | ExecutionException e) { - LOG.error(e.getMessage()); + LOG.error("Exception received while listing partition directories", e); 
executor.shutdownNow(); throw new HiveException(e.getCause()); } http://git-wip-us.apache.org/repos/asf/hive/blob/4c73511f/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java -- diff --git
hive git commit: HIVE-21040 : msck does unnecessary file listing at last level of directory tree (Vihang Karajgaonkar, reviewed by Prasanth Jayachandran)
Repository: hive Updated Branches: refs/heads/master e103abc3f -> 867a187bb HIVE-21040 : msck does unnecessary file listing at last level of directory tree (Vihang Karajgaonkar, reviewed by Prasanth Jayachandran) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/867a187b Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/867a187b Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/867a187b Branch: refs/heads/master Commit: 867a187bbd0b4d9fe8b567d8eb8e5a8fcd6afa9f Parents: e103abc Author: Vihang Karajgaonkar Authored: Mon Dec 17 17:13:56 2018 -0800 Committer: Vihang Karajgaonkar Committed: Thu Dec 20 18:47:43 2018 -0800 -- .../ql/metadata/TestHiveMetaStoreChecker.java | 3 + .../hive/metastore/HiveMetaStoreChecker.java| 18 +-- .../hive/metastore/TestMsckCheckPartitions.java | 138 +++ 3 files changed, 151 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/867a187b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java -- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java index 434d82a..520eb1b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java @@ -714,6 +714,9 @@ public class TestHiveMetaStoreChecker { private void createDirectory(String partPath) throws IOException { Path part = new Path(partPath); fs.mkdirs(part); +// create files under partitions to simulate real partitions +fs.createNewFile(new Path(partPath + Path.SEPARATOR + "dummydata1")); +fs.createNewFile(new Path(partPath + Path.SEPARATOR + "dummydata2")); fs.deleteOnExit(part); } } http://git-wip-us.apache.org/repos/asf/hive/blob/867a187b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java 
-- diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java index 2df45f6..6f4400a 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java @@ -45,6 +45,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ThreadFactory; +import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -474,10 +475,13 @@ public class HiveMetaStoreChecker { throws IOException, MetastoreException { final Path currentPath = pd.p; final int currentDepth = pd.depth; + if (currentDepth == partColNames.size()) { +return currentPath; + } FileStatus[] fileStatuses = fs.listStatus(currentPath, FileUtils.HIDDEN_FILES_PATH_FILTER); // found no files under a sub-directory under table base path; it is possible that the table // is empty and hence there are no partition sub-directories created under base path - if (fileStatuses.length == 0 && currentDepth > 0 && currentDepth < partColNames.size()) { + if (fileStatuses.length == 0 && currentDepth > 0) { // since maxDepth is not yet reached, we are missing partition // columns in currentPath logOrThrowExceptionWithMsg( @@ -485,12 +489,12 @@ public class HiveMetaStoreChecker { } else { // found files under currentPath add them to the queue if it is a directory for (FileStatus fileStatus : fileStatuses) { - if (!fileStatus.isDirectory() && currentDepth < partColNames.size()) { + if (!fileStatus.isDirectory()) { // found a file at depth which is less than number of partition keys logOrThrowExceptionWithMsg( "MSCK finds a 
file rather than a directory when it searches for " + fileStatus.getPath().toString()); - } else if (fileStatus.isDirectory() && currentDepth < partColNames.size()) { + } else { // found a sub-directory at a depth less than number of partition keys // validate if the partition directory name matches with the corresponding // partition colName at currentDepth
hive git commit: HIVE-20992 : Split the config hive.metastore.dbaccess.ssl.properties into more meaningful configs (Morio Ramdenbourg reviewed by Karthik Manamcheri and Vihang Karajgaonkar)
Repository: hive Updated Branches: refs/heads/master 4e415609c -> 4d9df0fdf HIVE-20992 : Split the config hive.metastore.dbaccess.ssl.properties into more meaningful configs (Morio Ramdenbourg reviewed by Karthik Manamcheri and Vihang Karajgaonkar) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4d9df0fd Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4d9df0fd Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4d9df0fd Branch: refs/heads/master Commit: 4d9df0fdf4f31adceacb67195497deddbf624103 Parents: 4e41560 Author: Morio Ramdenbourg Authored: Mon Dec 17 13:25:35 2018 -0800 Committer: Vihang Karajgaonkar Committed: Mon Dec 17 13:58:16 2018 -0800 -- .../hive/metastore/conf/MetastoreConf.java | 35 - .../hadoop/hive/metastore/ObjectStore.java | 78 +++- .../hadoop/hive/metastore/TestObjectStore.java | 74 +++ 3 files changed, 181 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/4d9df0fd/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java -- diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index e25a8cf..400097b 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -247,7 +247,9 @@ public class MetastoreConf { ConfVars.SSL_KEYSTORE_PASSWORD.varname, ConfVars.SSL_KEYSTORE_PASSWORD.hiveName, ConfVars.SSL_TRUSTSTORE_PASSWORD.varname, - ConfVars.SSL_TRUSTSTORE_PASSWORD.hiveName + ConfVars.SSL_TRUSTSTORE_PASSWORD.hiveName, + ConfVars.DBACCESS_SSL_TRUSTSTORE_PASSWORD.varname, + ConfVars.DBACCESS_SSL_TRUSTSTORE_PASSWORD.hiveName ); public static 
ConfVars getMetaConf(String name) { @@ -452,9 +454,26 @@ public class MetastoreConf { "Default transaction isolation level for identity generation."), DATANUCLEUS_USE_LEGACY_VALUE_STRATEGY("datanucleus.rdbms.useLegacyNativeValueStrategy", "datanucleus.rdbms.useLegacyNativeValueStrategy", true, ""), -DBACCESS_SSL_PROPS("metastore.dbaccess.ssl.properties", "hive.metastore.dbaccess.ssl.properties", "", -"Comma-separated SSL properties for metastore to access database when JDO connection URL\n" + -"enables SSL access. e.g. javax.net.ssl.trustStore=/tmp/truststore,javax.net.ssl.trustStorePassword=pwd."), + +// Parameters for configuring SSL encryption to the database store +// If DBACCESS_USE_SSL is false, then all other DBACCESS_SSL_* properties will be ignored + DBACCESS_SSL_TRUSTSTORE_PASSWORD("metastore.dbaccess.ssl.truststore.password", "hive.metastore.dbaccess.ssl.truststore.password", "", +"Password for the Java truststore file that is used when encrypting the connection to the database store. \n" ++ "This directly maps to the javax.net.ssl.trustStorePassword Java system property. \n" ++ "While Java does allow an empty truststore password, we highly recommend against this. \n" ++ "An empty password can compromise the integrity of the truststore file."), +DBACCESS_SSL_TRUSTSTORE_PATH("metastore.dbaccess.ssl.truststore.path", "hive.metastore.dbaccess.ssl.truststore.path", "", +"Location on disk of the Java truststore file to use when encrypting the connection to the database store. \n" ++ "This directly maps to the javax.net.ssl.trustStore Java system property. \n" ++ "This file consists of a collection of certificates trusted by the metastore server.\n"), +DBACCESS_SSL_TRUSTSTORE_TYPE("metastore.dbaccess.ssl.truststore.type", "hive.metastore.dbaccess.ssl.truststore.type", "jks", +new StringSetValidator("jceks", "jks", "dks", "pkcs11", "pkcs12"), +"File type for the Java truststore file that is used when encrypting the connection to the database store. 
\n" ++ "This directly maps to the javax.net.ssl.trustStoreType Java system property. \n" ++ "Types jceks, jks, dks, pkcs11, and pkcs12 can be read from Java 8 and beyond. We default to jks. \n"), +DBACCESS_USE_SSL("metastore.dbaccess.ssl.use.SSL", "hive.metastore.dbaccess.ssl.use.SSL", false, +"Set this to true to use SSL encryption to the database store."), + DEFAULTPARTITIONNAME("metastore.default.partition.name", "hive.exec.default.partition.name",
hive git commit: HIVE-21030 : Add credential store env properties redaction in JobConf (Denys Kuzmenko reviewed by Vihang Karajgaonkar)
Repository: hive Updated Branches: refs/heads/master 01ed46b4b -> 4e415609c HIVE-21030 : Add credential store env properties redaction in JobConf (Denys Kuzmenko reviewed by Vihang Karajgaonkar) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4e415609 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4e415609 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4e415609 Branch: refs/heads/master Commit: 4e415609ce333fd17c1dd5d4bf44ca9a3897ec42 Parents: 01ed46b Author: denys kuzmenko Authored: Fri Dec 14 13:29:03 2018 -0800 Committer: Vihang Karajgaonkar Committed: Fri Dec 14 13:29:41 2018 -0800 -- .../apache/hadoop/hive/conf/HiveConfUtil.java | 35 ++- .../ql/exec/TestHiveCredentialProviders.java| 36 2 files changed, 62 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/4e415609/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java -- diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java index 2ad5f9e..ae6fa43 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java @@ -24,12 +24,14 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hive.common.util.HiveStringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; @@ -38,6 +40,7 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; +import java.util.stream.Stream; /** * Hive 
Configuration utils @@ -182,23 +185,37 @@ public class HiveConfUtil { String jobKeyStoreLocation = jobConf.get(HiveConf.ConfVars.HIVE_SERVER2_JOB_CREDENTIAL_PROVIDER_PATH.varname); String oldKeyStoreLocation = jobConf.get(Constants.HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG); + if (StringUtils.isNotBlank(jobKeyStoreLocation)) { jobConf.set(Constants.HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG, jobKeyStoreLocation); LOG.debug("Setting job conf credstore location to " + jobKeyStoreLocation + " previous location was " + oldKeyStoreLocation); } -String credStorepassword = getJobCredentialProviderPassword(jobConf); -if (credStorepassword != null) { - // if the execution engine is MR set the map/reduce env with the credential store password +String credstorePassword = getJobCredentialProviderPassword(jobConf); +if (credstorePassword != null) { String execEngine = jobConf.get(ConfVars.HIVE_EXECUTION_ENGINE.varname); + if ("mr".equalsIgnoreCase(execEngine)) { -addKeyValuePair(jobConf, JobConf.MAPRED_MAP_TASK_ENV, -Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, credStorepassword); -addKeyValuePair(jobConf, JobConf.MAPRED_REDUCE_TASK_ENV, -Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, credStorepassword); -addKeyValuePair(jobConf, "yarn.app.mapreduce.am.admin.user.env", -Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, credStorepassword); +// if the execution engine is MR set the map/reduce env with the credential store password + +Collection redactedProperties = + jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES); + +Stream.of( +JobConf.MAPRED_MAP_TASK_ENV, +JobConf.MAPRED_REDUCE_TASK_ENV, +"yarn.app.mapreduce.am.admin.user.env") + +.forEach(property -> { + addKeyValuePair(jobConf, property, + Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, credstorePassword); + redactedProperties.add(property); +}); + +// Hide sensitive configuration values from MR HistoryUI by telling MR to redact the following list. 
+jobConf.set(MRJobConfig.MR_JOB_REDACTED_PROPERTIES, +StringUtils.join(redactedProperties, ",")); } } } http://git-wip-us.apache.org/repos/asf/hive/blob/4e415609/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHiveCredentialProviders.java -- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHiveCredentialProviders.java
hive git commit: HIVE-20860 : Fix or disable TestMiniLlapLocalCliDriver.testCliDriver[cbo_limit] (addendum)
Repository: hive Updated Branches: refs/heads/master c7b5454aa -> 01ed46b4b HIVE-20860 : Fix or disable TestMiniLlapLocalCliDriver.testCliDriver[cbo_limit] (addendum) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/01ed46b4 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/01ed46b4 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/01ed46b4 Branch: refs/heads/master Commit: 01ed46b4bb1ddc46c13f3daf3678e887fe74ee5c Parents: c7b5454 Author: Vihang Karajgaonkar Authored: Mon Dec 3 15:09:59 2018 -0800 Committer: Vihang Karajgaonkar Committed: Fri Dec 14 13:16:44 2018 -0800 -- .../main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/01ed46b4/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java -- diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java index 2017c94..ed8ae54 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java @@ -157,7 +157,6 @@ public class CliConfigs { includesFrom(testConfigProps, "minillap.query.files"); includesFrom(testConfigProps, "minillap.shared.query.files"); -excludeQuery("cbo_limit.q"); //Disabled in HIVE-20860 setResultsDir("ql/src/test/results/clientpositive/llap"); setLogDir("itests/qtest/target/qfile-results/clientpositive"); @@ -256,6 +255,7 @@ public class CliConfigs { excludeQuery("schema_evol_orc_acidvec_part.q"); // Disabled in HIVE-19509 excludeQuery("schema_evol_orc_vec_part_llap_io.q"); // Disabled in HIVE-19509 excludeQuery("load_dyn_part3.q"); // Disabled in HIVE-20662. Enable in HIVE-20663. +excludeQuery("cbo_limit.q"); //Disabled in HIVE-20860. 
Enable in HIVE-20972 setResultsDir("ql/src/test/results/clientpositive/llap"); setLogDir("itests/qtest/target/qfile-results/clientpositive");
hive git commit: HIVE-20740 : Remove global lock in ObjectStore.setConf method (Vihang Karajgaonkar, reviewed by Andrew Sherman and Naveen Gangam)
Repository: hive Updated Branches: refs/heads/branch-3 649c1c55b -> db8e9b0ef HIVE-20740 : Remove global lock in ObjectStore.setConf method (Vihang Karajgaonkar, reviewed by Andrew Sherman and Naveen Gangam) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/db8e9b0e Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/db8e9b0e Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/db8e9b0e Branch: refs/heads/branch-3 Commit: db8e9b0efd058c0a0bd18334f44608c93a84077e Parents: 649c1c5 Author: Vihang Karajgaonkar Authored: Thu Nov 29 15:07:42 2018 -0800 Committer: Vihang Karajgaonkar Committed: Thu Nov 29 15:13:23 2018 -0800 -- .../hive/ql/parse/TestReplicationScenarios.java | 4 +- .../apache/hive/jdbc/TestJdbcWithMiniHS2.java | 3 +- .../hadoop/hive/ql/session/SessionState.java| 3 +- .../hadoop/hive/metastore/ObjectStore.java | 447 ++- .../metastore/PersistenceManagerProvider.java | 537 +++ .../hadoop/hive/metastore/TestObjectStore.java | 56 +- 6 files changed, 640 insertions(+), 410 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/db8e9b0e/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java -- diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java index 4b6bc77..b3a19cb 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore; import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.BehaviourInjection; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; -import 
org.apache.hadoop.hive.metastore.ObjectStore; +import org.apache.hadoop.hive.metastore.PersistenceManagerProvider; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -179,7 +179,7 @@ public class TestReplicationScenarios { driverMirror = DriverFactory.newDriver(hconfMirror); metaStoreClientMirror = new HiveMetaStoreClient(hconfMirror); -ObjectStore.setTwoMetastoreTesting(true); +PersistenceManagerProvider.setTwoMetastoreTesting(true); } @AfterClass http://git-wip-us.apache.org/repos/asf/hive/blob/db8e9b0e/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java -- diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java index 099b67a..4d048ef 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java @@ -65,6 +65,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.ObjectStore; +import org.apache.hadoop.hive.metastore.PersistenceManagerProvider; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.UDF; @@ -1090,7 +1091,7 @@ public class TestJdbcWithMiniHS2 { NucleusContext nc = null; Map cMap; try { - pmf = ObjectStore.class.getDeclaredField("pmf"); + pmf = PersistenceManagerProvider.class.getDeclaredField("pmf"); if (pmf != null) { pmf.setAccessible(true); jdoPmf = (JDOPersistenceManagerFactory) pmf.get(null); http://git-wip-us.apache.org/repos/asf/hive/blob/db8e9b0e/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java -- diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 6f39b03..844620f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -59,6 +59,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.conf.HiveConfUtil; import org.apache.hadoop.hive.metastore.ObjectStore; +import
hive git commit: HIVE-20740 : Remove global lock in ObjectStore.setConf method (Vihang Karajgaonkar, reviewed by Andrew Sherman and Naveen Gangam)
Repository: hive Updated Branches: refs/heads/master 75c6ee417 -> 8411ea567 HIVE-20740 : Remove global lock in ObjectStore.setConf method (Vihang Karajgaonkar, reviewed by Andrew Sherman and Naveen Gangam) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8411ea56 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8411ea56 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8411ea56 Branch: refs/heads/master Commit: 8411ea5675c3dc8488ce08a7a8245e35a20d07eb Parents: 75c6ee4 Author: Vihang Karajgaonkar Authored: Tue Oct 16 12:11:50 2018 -0700 Committer: Vihang Karajgaonkar Committed: Thu Nov 29 14:21:03 2018 -0800 -- .../hive/ql/parse/TestReplicationScenarios.java | 4 +- .../apache/hive/jdbc/TestJdbcWithMiniHS2.java | 3 +- .../hadoop/hive/ql/session/SessionState.java| 3 +- .../hadoop/hive/metastore/ObjectStore.java | 461 ++-- .../metastore/PersistenceManagerProvider.java | 543 +++ .../hadoop/hive/metastore/TestObjectStore.java | 60 +- 6 files changed, 650 insertions(+), 424 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/8411ea56/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java -- diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java index 5a88550..28910cf 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore; import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.BehaviourInjection; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; -import 
org.apache.hadoop.hive.metastore.ObjectStore; +import org.apache.hadoop.hive.metastore.PersistenceManagerProvider; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; @@ -199,7 +199,7 @@ public class TestReplicationScenarios { driverMirror = DriverFactory.newDriver(hconfMirror); metaStoreClientMirror = new HiveMetaStoreClient(hconfMirror); -ObjectStore.setTwoMetastoreTesting(true); +PersistenceManagerProvider.setTwoMetastoreTesting(true); } @AfterClass http://git-wip-us.apache.org/repos/asf/hive/blob/8411ea56/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java -- diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java index 5cb0a88..a2da15f 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java @@ -65,6 +65,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.ObjectStore; +import org.apache.hadoop.hive.metastore.PersistenceManagerProvider; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.UDF; @@ -1147,7 +1148,7 @@ public class TestJdbcWithMiniHS2 { NucleusContext nc = null; Map cMap; try { - pmf = ObjectStore.class.getDeclaredField("pmf"); + pmf = PersistenceManagerProvider.class.getDeclaredField("pmf"); if (pmf != null) { pmf.setAccessible(true); jdoPmf = (JDOPersistenceManagerFactory) pmf.get(null); http://git-wip-us.apache.org/repos/asf/hive/blob/8411ea56/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java -- diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index e9565be..ff523cc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -60,6 +60,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.conf.HiveConfUtil; import org.apache.hadoop.hive.metastore.ObjectStore; +import
hive git commit: HIVE-20860 : Fix or disable TestMiniLlapLocalCliDriver.testCliDriver[cbo_limit] (Vihang Karajgaonkar, reviewed by Jesus Camacho Rodriguez)
Repository: hive Updated Branches: refs/heads/master 0a7cc714f -> e579e0683 HIVE-20860 : Fix or disable TestMiniLlapLocalCliDriver.testCliDriver[cbo_limit] (Vihang Karajgaonkar, reviewed by Jesus Camacho Rodriguez) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e579e068 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e579e068 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e579e068 Branch: refs/heads/master Commit: e579e0683622893103a740d8689071be1106964a Parents: 0a7cc71 Author: Vihang Karajgaonkar Authored: Mon Nov 26 15:12:46 2018 -0800 Committer: Vihang Karajgaonkar Committed: Mon Nov 26 17:57:24 2018 -0800 -- .../main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java| 1 + 1 file changed, 1 insertion(+) -- http://git-wip-us.apache.org/repos/asf/hive/blob/e579e068/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java -- diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java index df058ea..7ac7ba1 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java @@ -155,6 +155,7 @@ public class CliConfigs { includesFrom(testConfigProps, "minillap.query.files"); includesFrom(testConfigProps, "minillap.shared.query.files"); +excludeQuery("cbo_limit.q"); //Disabled in HIVE-20860 setResultsDir("ql/src/test/results/clientpositive/llap"); setLogDir("itests/qtest/target/qfile-results/clientpositive");
hive git commit: HIVE-20916 : Fix typo in JSONCreateDatabaseMessage and add test for alter database (Vihang Karajgaonkar, reviewed by Andrew Sherman and Bharathkrishna Guruvayoor Murali)
Repository: hive Updated Branches: refs/heads/branch-3 4663e50e7 -> ec89f32a3 HIVE-20916 : Fix typo in JSONCreateDatabaseMessage and add test for alter database (Vihang Karajgaonkar, reviewed by Andrew Sherman and Bharathkrishna Guruvayoor Murali) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ec89f32a Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ec89f32a Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ec89f32a Branch: refs/heads/branch-3 Commit: ec89f32a31f1623c97012829618f1ad79b741bd7 Parents: 4663e50 Author: Vihang Karajgaonkar Authored: Fri Nov 16 10:59:41 2018 -0800 Committer: Vihang Karajgaonkar Committed: Fri Nov 16 14:31:53 2018 -0800 -- .../listener/TestDbNotificationListener.java| 35 .../json/JSONCreateDatabaseMessage.java | 2 +- 2 files changed, 36 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/ec89f32a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java -- diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 82429e3..06f5d5c 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -77,6 +77,7 @@ import org.apache.hadoop.hive.metastore.events.ListenerEvent; import org.apache.hadoop.hive.metastore.events.AllocWriteIdEvent; import org.apache.hadoop.hive.metastore.events.AcidWriteEvent; import org.apache.hadoop.hive.metastore.messaging.AddPartitionMessage; +import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage; import org.apache.hadoop.hive.metastore.messaging.AlterPartitionMessage; import 
org.apache.hadoop.hive.metastore.messaging.AlterTableMessage; import org.apache.hadoop.hive.metastore.messaging.CreateDatabaseMessage; @@ -320,6 +321,7 @@ public class TestDbNotificationListener { // Parse the message field CreateDatabaseMessage createDbMsg = md.getCreateDatabaseMessage(event.getMessage()); assertEquals(dbName, createDbMsg.getDB()); +assertEquals(db, createDbMsg.getDatabaseObject()); // Verify the eventID was passed to the non-transactional listener MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.CREATE_DATABASE, firstEventId + 1); @@ -339,6 +341,39 @@ public class TestDbNotificationListener { } @Test + public void alterDatabase() throws Exception { +String dbName = "alterdb"; +String dbLocationUri = "file:/tmp"; +String dbDescription = "no description"; +msClient.createDatabase(new Database(dbName, dbDescription, dbLocationUri, emptyParameters)); +// get the db for comparison below since it may include additional parameters +Database dbBefore = msClient.getDatabase(dbName); +// create alter database notification +String newDesc = "test database"; +Database dbAfter = dbBefore.deepCopy(); +dbAfter.setDescription(newDesc); +msClient.alterDatabase(dbName, dbAfter); +dbAfter = msClient.getDatabase(dbName); + +// Read notification from metastore +NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null); +assertEquals(2, rsp.getEventsSize()); +// check the contents of alter database notification +NotificationEvent event = rsp.getEvents().get(1); +assertEquals(firstEventId + 2, event.getEventId()); +assertTrue(event.getEventTime() >= startTime); +assertEquals(EventType.ALTER_DATABASE.toString(), event.getEventType()); +assertEquals(dbName, event.getDbName()); +assertNull(event.getTableName()); + +// Parse the message field +AlterDatabaseMessage alterDatabaseMessage = md.getAlterDatabaseMessage(event.getMessage()); +assertEquals(dbName, alterDatabaseMessage.getDB()); +assertEquals(dbBefore, 
alterDatabaseMessage.getDbObjBefore()); +assertEquals(dbAfter, alterDatabaseMessage.getDbObjAfter()); + } + + @Test public void dropDatabase() throws Exception { String dbName = "dropdb"; String dbName2 = "dropdb2"; http://git-wip-us.apache.org/repos/asf/hive/blob/ec89f32a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONCreateDatabaseMessage.java -- diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONCreateDatabaseMessage.java
hive git commit: HIVE-20916 : Fix typo in JSONCreateDatabaseMessage and add test for alter database (Vihang Karajgaonkar, reviewed by Andrew Sherman and Bharathkrishna Guruvayoor Murali)
Repository: hive Updated Branches: refs/heads/master c737dbd8f -> fd5f34fdd HIVE-20916 : Fix typo in JSONCreateDatabaseMessage and add test for alter database (Vihang Karajgaonkar, reviewed by Andrew Sherman and Bharathkrishna Guruvayoor Murali) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fd5f34fd Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fd5f34fd Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fd5f34fd Branch: refs/heads/master Commit: fd5f34fdd9be45b05f7cd7c771b01ff9dd1765fc Parents: c737dbd Author: Vihang Karajgaonkar Authored: Fri Nov 16 10:59:41 2018 -0800 Committer: Vihang Karajgaonkar Committed: Fri Nov 16 10:59:58 2018 -0800 -- .../listener/TestDbNotificationListener.java| 35 .../json/JSONCreateDatabaseMessage.java | 2 +- 2 files changed, 36 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/fd5f34fd/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java -- diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 3e404df..d4b7b02 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -81,6 +81,7 @@ import org.apache.hadoop.hive.metastore.events.ListenerEvent; import org.apache.hadoop.hive.metastore.events.AllocWriteIdEvent; import org.apache.hadoop.hive.metastore.events.AcidWriteEvent; import org.apache.hadoop.hive.metastore.messaging.AddPartitionMessage; +import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage; import org.apache.hadoop.hive.metastore.messaging.AlterPartitionMessage; import 
org.apache.hadoop.hive.metastore.messaging.AlterTableMessage; import org.apache.hadoop.hive.metastore.messaging.CreateDatabaseMessage; @@ -341,6 +342,7 @@ public class TestDbNotificationListener { // Parse the message field CreateDatabaseMessage createDbMsg = md.getCreateDatabaseMessage(event.getMessage()); assertEquals(dbName, createDbMsg.getDB()); +assertEquals(db, createDbMsg.getDatabaseObject()); // Verify the eventID was passed to the non-transactional listener MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.CREATE_DATABASE, firstEventId + 1); @@ -364,6 +366,39 @@ public class TestDbNotificationListener { } @Test + public void alterDatabase() throws Exception { +String dbName = "alterdb"; +String dbLocationUri = testTempDir; +String dbDescription = "no description"; +msClient.createDatabase(new Database(dbName, dbDescription, dbLocationUri, emptyParameters)); +// get the db for comparison below since it may include additional parameters +Database dbBefore = msClient.getDatabase(dbName); +// create alter database notification +String newDesc = "test database"; +Database dbAfter = dbBefore.deepCopy(); +dbAfter.setDescription(newDesc); +msClient.alterDatabase(dbName, dbAfter); +dbAfter = msClient.getDatabase(dbName); + +// Read notification from metastore +NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null); +assertEquals(2, rsp.getEventsSize()); +// check the contents of alter database notification +NotificationEvent event = rsp.getEvents().get(1); +assertEquals(firstEventId + 2, event.getEventId()); +assertTrue(event.getEventTime() >= startTime); +assertEquals(EventType.ALTER_DATABASE.toString(), event.getEventType()); +assertEquals(dbName, event.getDbName()); +assertNull(event.getTableName()); + +// Parse the message field +AlterDatabaseMessage alterDatabaseMessage = md.getAlterDatabaseMessage(event.getMessage()); +assertEquals(dbName, alterDatabaseMessage.getDB()); +assertEquals(dbBefore, 
alterDatabaseMessage.getDbObjBefore()); +assertEquals(dbAfter, alterDatabaseMessage.getDbObjAfter()); + } + + @Test public void dropDatabase() throws Exception { String dbName = "dropdb"; String dbName2 = "dropdb2"; http://git-wip-us.apache.org/repos/asf/hive/blob/fd5f34fd/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONCreateDatabaseMessage.java -- diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONCreateDatabaseMessage.java
hive git commit: HIVE-16839 : Unbalanced calls to openTransaction/commitTransaction when alter the same partition concurrently (Guang Yang, reviewed by Karthik Manamcheri and Vihang Karajgaonkar)
Repository: hive Updated Branches: refs/heads/branch-3 6ed7ab6da -> cd4491900 HIVE-16839 : Unbalanced calls to openTransaction/commitTransaction when alter the same partition concurrently (Guang Yang, reviewed by Karthik Manamcheri and Vihang Karajgaonkar) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cd449190 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cd449190 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cd449190 Branch: refs/heads/branch-3 Commit: cd44919003392e068c65da8b656b9ffaee351b70 Parents: 6ed7ab6 Author: Vihang Karajgaonkar Authored: Thu Nov 8 11:31:49 2018 -0800 Committer: Vihang Karajgaonkar Committed: Thu Nov 8 11:31:49 2018 -0800 -- .../hadoop/hive/metastore/ObjectStore.java | 23 --- .../hadoop/hive/metastore/TestObjectStore.java | 68 2 files changed, 82 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/cd449190/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java -- diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java index b4a4616..545b5bd 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -2482,15 +2482,20 @@ public class ObjectStore implements RawStore, Configurable { @Override public Partition getPartition(String catName, String dbName, String tableName, List part_vals) throws NoSuchObjectException, MetaException { -openTransaction(); -Partition part = convertToPart(getMPartition(catName, dbName, tableName, part_vals)); -commitTransaction(); -if(part == null) { - throw new NoSuchObjectException("partition values=" - + part_vals.toString()); -} -part.setValues(part_vals); -return part; +Partition part; +boolean committed = 
false; +try { + openTransaction(); + part = convertToPart(getMPartition(catName, dbName, tableName, part_vals)); + committed = commitTransaction(); + if (part == null) { +throw new NoSuchObjectException("partition values=" + part_vals.toString()); + } + part.setValues(part_vals); + return part; +} finally { + rollbackAndCleanup(committed, (Query)null); +} } private MPartition getMPartition(String catName, String dbName, String tableName, List part_vals) http://git-wip-us.apache.org/repos/asf/hive/blob/cd449190/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java -- diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java index ac35882..2d9c229 100644 --- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java +++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java @@ -65,6 +65,7 @@ import javax.jdo.Query; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.concurrent.BrokenBarrierException; @@ -647,6 +648,73 @@ public class TestObjectStore { } } + /** + * Test the concurrent drop of same partition would leak transaction. 
+ * https://issues.apache.org/jira/browse/HIVE-16839 + * + * Note: the leak happens during a race condition, this test case tries + * to simulate the race condition on best effort, it have two threads trying + * to drop the same set of partitions + */ + @Test + public void testConcurrentDropPartitions() throws MetaException, InvalidObjectException { +Database db1 = new DatabaseBuilder() + .setName(DB1) + .setDescription("description") + .setLocation("locationurl") + .build(conf); +objectStore.createDatabase(db1); +StorageDescriptor sd = createFakeSd("location"); +HashMap tableParams = new HashMap<>(); +tableParams.put("EXTERNAL", "false"); +FieldSchema partitionKey1 = new FieldSchema("Country", ColumnType.STRING_TYPE_NAME, ""); +FieldSchema partitionKey2 = new FieldSchema("State", ColumnType.STRING_TYPE_NAME, ""); +Table tbl1 = + new Table(TABLE1, DB1, "owner", 1, 2, 3, sd, Arrays.asList(partitionKey1, partitionKey2), +tableParams, null, null, "MANAGED_TABLE"); +objectStore.createTable(tbl1); +HashMap
[2/2] hive git commit: Addendum : HIVE-16839 : Removed ObjectStore.java.orig
Addendum : HIVE-16839 : Removed ObjectStore.java.orig Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/dd1a3efe Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/dd1a3efe Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/dd1a3efe Branch: refs/heads/master Commit: dd1a3efe0dab1406758c130184b5a24462953ec0 Parents: 9bfff30 Author: Vihang Karajgaonkar Authored: Thu Nov 8 10:59:11 2018 -0800 Committer: Vihang Karajgaonkar Committed: Thu Nov 8 10:59:11 2018 -0800 -- .../hadoop/hive/metastore/ObjectStore.java.orig | 12818 - 1 file changed, 12818 deletions(-) --
[1/2] hive git commit: Addendum : HIVE-16839 : Removed ObjectStore.java.orig
Repository: hive Updated Branches: refs/heads/master 9bfff3012 -> dd1a3efe0 http://git-wip-us.apache.org/repos/asf/hive/blob/dd1a3efe/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java.orig -- diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java.orig b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java.orig deleted file mode 100644 index ba3acf9..000 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java.orig +++ /dev/null @@ -1,12818 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.metastore; - -import static org.apache.commons.lang.StringUtils.join; -import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; -import static org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier; - -import java.io.IOException; -import java.lang.reflect.Field; -import java.net.InetAddress; -import java.net.URI; -import java.nio.ByteBuffer; -import java.sql.Connection; -import java.sql.SQLException; -import java.sql.SQLIntegrityConstraintViolationException; -import java.sql.Statement; -import java.time.LocalDateTime; -import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Properties; -import java.util.Set; -import java.util.TreeSet; -import java.util.UUID; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; -import java.util.regex.Pattern; - -import javax.jdo.JDOCanRetryException; -import javax.jdo.JDODataStoreException; -import javax.jdo.JDOException; -import javax.jdo.JDOHelper; -import javax.jdo.JDOObjectNotFoundException; -import javax.jdo.PersistenceManager; -import javax.jdo.PersistenceManagerFactory; -import javax.jdo.Query; -import javax.jdo.Transaction; -import javax.jdo.datastore.DataStoreCache; -import javax.jdo.datastore.JDOConnection; -import javax.jdo.identity.IntIdentity; -import javax.sql.DataSource; - -import com.google.common.base.Joiner; -import com.google.common.base.Strings; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.ArrayUtils; -import 
org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.exception.ExceptionUtils; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.common.*; -import org.apache.hadoop.hive.metastore.MetaStoreDirectSql.SqlFilterForPushdown; -import org.apache.hadoop.hive.metastore.api.*; -import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars; -import org.apache.hadoop.hive.metastore.datasource.DataSourceProvider; -import org.apache.hadoop.hive.metastore.datasource.DataSourceProviderFactory; -import org.apache.hadoop.hive.metastore.metrics.Metrics; -import org.apache.hadoop.hive.metastore.metrics.MetricsConstants; -import org.apache.hadoop.hive.metastore.model.*; -import org.apache.hadoop.hive.metastore.model.MWMMapping.EntityType; -import org.apache.hadoop.hive.metastore.model.MWMResourcePlan.Status; -import org.apache.hadoop.hive.metastore.parser.ExpressionTree; -import org.apache.hadoop.hive.metastore.parser.ExpressionTree.FilterBuilder; -import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; -import org.apache.hadoop.hive.metastore.tools.SQLGenerator; -import org.apache.hadoop.hive.metastore.txn.TxnUtils;
[3/3] hive git commit: HIVE-16839 : Unbalanced calls to openTransaction/commitTransaction when alter the same partition concurrently (Guang Yang, reviewed by Karthik Manamcheri and Vihang Karajgaonkar
HIVE-16839 : Unbalanced calls to openTransaction/commitTransaction when alter the same partition concurrently (Guang Yang, reviewed by Karthik Manamcheri and Vihang Karajgaonkar) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9bfff301 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9bfff301 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9bfff301 Branch: refs/heads/master Commit: 9bfff30128b3c1e7c14daca2f5e9b573cfede22f Parents: fc18e4a Author: Vihang Karajgaonkar Authored: Thu Nov 8 10:49:27 2018 -0800 Committer: Vihang Karajgaonkar Committed: Thu Nov 8 10:49:27 2018 -0800 -- .../hadoop/hive/metastore/ObjectStore.java |51 +- .../hadoop/hive/metastore/ObjectStore.java.orig | 12818 + .../hadoop/hive/metastore/TestObjectStore.java |67 + 3 files changed, 12914 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/9bfff301/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java -- diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java index ba3acf9..570281b 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -2445,34 +2445,41 @@ public class ObjectStore implements RawStore, Configurable { List part_vals, String validWriteIds) throws NoSuchObjectException, MetaException { -openTransaction(); -MTable table = this.getMTable(catName, dbName, tableName); -MPartition mpart = getMPartition(catName, dbName, tableName, part_vals); -Partition part = convertToPart(mpart); -commitTransaction(); -if(part == null) { - throw new NoSuchObjectException("partition values=" +Partition part = null; 
+boolean committed = false; +try { + openTransaction(); + MTable table = this.getMTable(catName, dbName, tableName); + MPartition mpart = getMPartition(catName, dbName, tableName, part_vals); + part = convertToPart(mpart); + committed = commitTransaction(); + if (part == null) { +throw new NoSuchObjectException("partition values=" + part_vals.toString()); -} -part.setValues(part_vals); -// If transactional table partition, check whether the current version partition -// statistics in the metastore comply with the client query's snapshot isolation. -long statsWriteId = mpart.getWriteId(); -if (TxnUtils.isTransactionalTable(table.getParameters())) { - if (!areTxnStatsSupported) { -// Do not make persistent the following state since it is query specific (not global). -StatsSetupConst.setBasicStatsState(part.getParameters(), StatsSetupConst.FALSE); -LOG.info("Removed COLUMN_STATS_ACCURATE from Partition object's parameters."); - } else if (validWriteIds != null) { -if (isCurrentStatsValidForTheQuery(part, statsWriteId, validWriteIds, false)) { - part.setIsStatsCompliant(true); -} else { - part.setIsStatsCompliant(false); + } + + part.setValues(part_vals); + // If transactional table partition, check whether the current version partition + // statistics in the metastore comply with the client query's snapshot isolation. + long statsWriteId = mpart.getWriteId(); + if (TxnUtils.isTransactionalTable(table.getParameters())) { +if (!areTxnStatsSupported) { // Do not make persistent the following state since it is query specific (not global). 
StatsSetupConst.setBasicStatsState(part.getParameters(), StatsSetupConst.FALSE); LOG.info("Removed COLUMN_STATS_ACCURATE from Partition object's parameters."); +} else if (validWriteIds != null) { + if (isCurrentStatsValidForTheQuery(part, statsWriteId, validWriteIds, false)) { +part.setIsStatsCompliant(true); + } else { +part.setIsStatsCompliant(false); +// Do not make persistent the following state since it is query specific (not global). +StatsSetupConst.setBasicStatsState(part.getParameters(), StatsSetupConst.FALSE); +LOG.info("Removed COLUMN_STATS_ACCURATE from Partition object's parameters."); + } } } +} finally { + rollbackAndCleanup(committed, (Query)null); } return part; }
[1/3] hive git commit: HIVE-16839 : Unbalanced calls to openTransaction/commitTransaction when alter the same partition concurrently (Guang Yang, reviewed by Karthik Manamcheri and Vihang Karajgaonkar
Repository: hive Updated Branches: refs/heads/master fc18e4a19 -> 9bfff3012 http://git-wip-us.apache.org/repos/asf/hive/blob/9bfff301/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java -- diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java index b74c304..af9efd9 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java @@ -85,6 +85,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.concurrent.BrokenBarrierException; @@ -361,6 +362,72 @@ public class TestObjectStore { } /** + * Test the concurrent drop of same partition would leak transaction. 
+ * https://issues.apache.org/jira/browse/HIVE-16839 + * + * Note: the leak happens during a race condition, this test case tries + * to simulate the race condition on best effort, it have two threads trying + * to drop the same set of partitions + */ + @Test + public void testConcurrentDropPartitions() throws MetaException, InvalidObjectException { +Database db1 = new DatabaseBuilder() + .setName(DB1) + .setDescription("description") + .setLocation("locationurl") + .build(conf); +objectStore.createDatabase(db1); +StorageDescriptor sd = createFakeSd("location"); +HashMap tableParams = new HashMap<>(); +tableParams.put("EXTERNAL", "false"); +FieldSchema partitionKey1 = new FieldSchema("Country", ColumnType.STRING_TYPE_NAME, ""); +FieldSchema partitionKey2 = new FieldSchema("State", ColumnType.STRING_TYPE_NAME, ""); +Table tbl1 = + new Table(TABLE1, DB1, "owner", 1, 2, 3, sd, Arrays.asList(partitionKey1, partitionKey2), +tableParams, null, null, "MANAGED_TABLE"); +objectStore.createTable(tbl1); +HashMap partitionParams = new HashMap<>(); +partitionParams.put("PARTITION_LEVEL_PRIVILEGE", "true"); + +// Create some partitions +List> partNames = new LinkedList<>(); +for (char c = 'A'; c < 'Z'; c++) { + String name = "" + c; + partNames.add(Arrays.asList(name, name)); +} +for (List n : partNames) { + Partition p = new Partition(n, DB1, TABLE1, 111, 111, sd, partitionParams); + p.setCatName(DEFAULT_CATALOG_NAME); + objectStore.addPartition(p); +} + +int numThreads = 2; +ExecutorService executorService = Executors.newFixedThreadPool(numThreads); +for (int i = 0; i < numThreads; i++) { + executorService.execute( +() -> { + for (List p : partNames) { +try { + objectStore.dropPartition(DEFAULT_CATALOG_NAME, DB1, TABLE1, p); + System.out.println("Dropping partition: " + p.get(0)); +} catch (Exception e) { + throw new RuntimeException(e); +} + } +} + ); +} + +executorService.shutdown(); +try { + executorService.awaitTermination(30, TimeUnit.SECONDS); +} catch 
(InterruptedException ex) { + Assert.assertTrue("Got interrupted.", false); +} +Assert.assertTrue("Expect no active transactions.", !objectStore.isActiveTransaction()); + } + + /** * Checks if the JDO cache is able to handle directSQL partition drops in one session. * @throws MetaException * @throws InvalidObjectException
[2/3] hive git commit: HIVE-16839 : Unbalanced calls to openTransaction/commitTransaction when alter the same partition concurrently (Guang Yang, reviewed by Karthik Manamcheri and Vihang Karajgaonkar
http://git-wip-us.apache.org/repos/asf/hive/blob/9bfff301/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java.orig -- diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java.orig b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java.orig new file mode 100644 index 000..ba3acf9 --- /dev/null +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java.orig @@ -0,0 +1,12818 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.metastore; + +import static org.apache.commons.lang.StringUtils.join; +import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; +import static org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier; + +import java.io.IOException; +import java.lang.reflect.Field; +import java.net.InetAddress; +import java.net.URI; +import java.nio.ByteBuffer; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.SQLIntegrityConstraintViolationException; +import java.sql.Statement; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Properties; +import java.util.Set; +import java.util.TreeSet; +import java.util.UUID; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import java.util.regex.Pattern; + +import javax.jdo.JDOCanRetryException; +import javax.jdo.JDODataStoreException; +import javax.jdo.JDOException; +import javax.jdo.JDOHelper; +import javax.jdo.JDOObjectNotFoundException; +import javax.jdo.PersistenceManager; +import javax.jdo.PersistenceManagerFactory; +import javax.jdo.Query; +import javax.jdo.Transaction; +import javax.jdo.datastore.DataStoreCache; +import javax.jdo.datastore.JDOConnection; +import javax.jdo.identity.IntIdentity; +import javax.sql.DataSource; + +import com.google.common.base.Joiner; +import com.google.common.base.Strings; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.ArrayUtils; +import 
org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.exception.ExceptionUtils; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configurable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.*; +import org.apache.hadoop.hive.metastore.MetaStoreDirectSql.SqlFilterForPushdown; +import org.apache.hadoop.hive.metastore.api.*; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars; +import org.apache.hadoop.hive.metastore.datasource.DataSourceProvider; +import org.apache.hadoop.hive.metastore.datasource.DataSourceProviderFactory; +import org.apache.hadoop.hive.metastore.metrics.Metrics; +import org.apache.hadoop.hive.metastore.metrics.MetricsConstants; +import org.apache.hadoop.hive.metastore.model.*; +import org.apache.hadoop.hive.metastore.model.MWMMapping.EntityType; +import org.apache.hadoop.hive.metastore.model.MWMResourcePlan.Status; +import org.apache.hadoop.hive.metastore.parser.ExpressionTree; +import org.apache.hadoop.hive.metastore.parser.ExpressionTree.FilterBuilder; +import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; +import org.apache.hadoop.hive.metastore.tools.SQLGenerator; +import org.apache.hadoop.hive.metastore.txn.TxnUtils; +import org.apache.hadoop.hive.metastore.utils.FileUtils; +import
[2/2] hive git commit: HIVE-20307 : Add support for filterspec to the getPartitions with projection API (Vihang Karajgaonkar, reviewed by Andrew Sherman)
HIVE-20307 : Add support for filterspec to the getPartitions with projection API (Vihang Karajgaonkar, reviewed by Andrew Sherman) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e39a1980 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e39a1980 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e39a1980 Branch: refs/heads/master Commit: e39a19801abf7b2a711883945e8c7a9e3551a09d Parents: dc8d8e1 Author: Vihang Karajgaonkar Authored: Wed Aug 22 20:29:23 2018 -0700 Committer: Vihang Karajgaonkar Committed: Tue Oct 16 14:15:45 2018 -0700 -- .../listener/DummyRawStoreFailEvent.java| 12 +- .../ql/metadata/SessionHiveMetaStoreClient.java | 2 +- .../hive/metastore/utils/MetaStoreUtils.java| 6 +- .../hadoop/hive/metastore/HiveMetaStore.java| 20 +- .../hive/metastore/MetaStoreDirectSql.java | 53 +- .../hadoop/hive/metastore/ObjectStore.java | 173 +++- .../apache/hadoop/hive/metastore/RawStore.java | 51 +- .../hive/metastore/cache/CachedStore.java | 11 +- .../DummyRawStoreControlledCommit.java | 22 +- .../DummyRawStoreForJdoConnection.java | 6 +- .../TestGetPartitionsUsingProjection.java | 700 -- ...PartitionsUsingProjectionAndFilterSpecs.java | 904 +++ 12 files changed, 1146 insertions(+), 814 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/e39a1980/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java -- diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java index d59d5d8..c3e1e8e 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java @@ -19,6 +19,8 @@ package org.apache.hive.hcatalog.listener; import 
org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.metastore.api.GetPartitionsFilterSpec; +import org.apache.hadoop.hive.metastore.api.GetPartitionsProjectionSpec; import org.apache.hadoop.hive.metastore.api.ISchemaName; import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor; import org.apache.hadoop.hive.metastore.api.Catalog; @@ -413,12 +415,10 @@ public class DummyRawStoreFailEvent implements RawStore, Configurable { } @Override - public List getPartitionSpecsByFilterAndProjection(String catalog, -String dbName, String tblName, -List fieldList, String includeParamKeyPattern, - String excludeParamKeyPattern) throws MetaException, NoSuchObjectException { -return objectStore.getPartitionSpecsByFilterAndProjection(catalog, dbName, tblName, fieldList, -includeParamKeyPattern, excludeParamKeyPattern); + public List getPartitionSpecsByFilterAndProjection(Table table, + GetPartitionsProjectionSpec projectionSpec, GetPartitionsFilterSpec filterSpec) + throws MetaException, NoSuchObjectException { +return objectStore.getPartitionSpecsByFilterAndProjection(table, projectionSpec, filterSpec); } @Override http://git-wip-us.apache.org/repos/asf/hive/blob/e39a1980/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java index a2b57fb..dd23d7d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java @@ -948,7 +948,7 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I * */ private List getPartitions(List partialPartVals) throws MetaException { -String partNameMatcher = MetaStoreUtils.makePartNameMatcher(tTable, partialPartVals); +String partNameMatcher = MetaStoreUtils.makePartNameMatcher(tTable, 
partialPartVals, ".*"); List matchedPartitions = new ArrayList<>(); for(String key : parts.keySet()) { if(key.matches(partNameMatcher)) { http://git-wip-us.apache.org/repos/asf/hive/blob/e39a1980/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
[1/2] hive git commit: HIVE-20307 : Add support for filterspec to the getPartitions with projection API (Vihang Karajgaonkar, reviewed by Andrew Sherman)
Repository: hive Updated Branches: refs/heads/master dc8d8e134 -> e39a19801 http://git-wip-us.apache.org/repos/asf/hive/blob/e39a1980/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestGetPartitionsUsingProjectionAndFilterSpecs.java -- diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestGetPartitionsUsingProjectionAndFilterSpecs.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestGetPartitionsUsingProjectionAndFilterSpecs.java new file mode 100644 index 000..bc43f3d --- /dev/null +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestGetPartitionsUsingProjectionAndFilterSpecs.java @@ -0,0 +1,904 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.metastore; + +import org.apache.commons.beanutils.PropertyUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.GetPartitionsFilterSpec; +import org.apache.hadoop.hive.metastore.api.GetPartitionsProjectionSpec; +import org.apache.hadoop.hive.metastore.api.GetPartitionsRequest; +import org.apache.hadoop.hive.metastore.api.GetPartitionsResponse; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PartitionFilterMode; +import org.apache.hadoop.hive.metastore.api.PartitionListComposingSpec; +import org.apache.hadoop.hive.metastore.api.PartitionSpec; +import org.apache.hadoop.hive.metastore.api.PartitionSpecWithSharedSD; +import org.apache.hadoop.hive.metastore.api.PartitionWithoutSD; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder; +import org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder; +import org.apache.hadoop.hive.metastore.client.builder.TableBuilder; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars; +import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge; +import org.apache.thrift.TException; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.lang.reflect.InvocationTargetException; +import java.util.ArrayList; +import java.util.Arrays; +import 
java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import static org.apache.hadoop.hive.metastore.ColumnType.SERIALIZATION_FORMAT; + +/** + * Tests for getPartitionsWithSpecs metastore API. This test create some partitions and makes sure + * that getPartitionsWithSpecs returns results which are comparable with the get_partitions API when + * various combinations of projection spec are set. Also checks the JDO code path in addition to + * directSQL code path + */ +@Category(MetastoreCheckinTest.class) +public class TestGetPartitionsUsingProjectionAndFilterSpecs { + private static final Logger LOG = LoggerFactory.getLogger(TestGetPartitionsUsingProjectionAndFilterSpecs.class); + protected static Configuration conf = MetastoreConf.newMetastoreConf(); + private static int port; + private static final String dbName = "test_projection_db"; + private static final String tblName = "test_projection_table"; + private List origPartitions; + private Table tbl; + private static final String EXCLUDE_KEY_PREFIX = "exclude"; + private HiveMetaStoreClient client; + + @BeforeClass + public static void startMetaStoreServer() throws Exception { +conf.set("hive.in.test", "true"); +
[3/3] hive git commit: HIVE-20306 : Addendum to remove .orig files
HIVE-20306 : Addendum to remove .orig files Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/64bef36a Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/64bef36a Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/64bef36a Branch: refs/heads/master Commit: 64bef36a36bcbb3dd644ffdc394673fdad41eb93 Parents: 390afb5 Author: Vihang Karajgaonkar Authored: Wed Oct 10 10:28:31 2018 -0700 Committer: Vihang Karajgaonkar Committed: Wed Oct 10 10:40:13 2018 -0700 -- .../hive/metastore/MetaStoreDirectSql.java.orig | 2845 .../hadoop/hive/metastore/ObjectStore.java.orig | 12514 - 2 files changed, 15359 deletions(-) --