This is an automated email from the ASF dual-hosted git repository.
difin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 83b49df2b02 HIVE-28783: Refactor hardcoded IPv4 addresses to support
dual-stack (IPv4/IPv6) networking (#5686) (Dmitriy Fingerman, reviewed by Ayush
Saxena)
83b49df2b02 is described below
commit 83b49df2b0274aba60ebd7f826997f2c057182c4
Author: Dmitriy Fingerman <[email protected]>
AuthorDate: Tue Mar 25 21:31:35 2025 -0400
HIVE-28783: Refactor hardcoded IPv4 addresses to support dual-stack
(IPv4/IPv6) networking (#5686) (Dmitriy Fingerman, reviewed by Ayush Saxena)
* HIVE-28783: Refactor hardcoded IPv4 addresses to support dual-stack
(IPv4/IPv6) networking
---------
Co-authored-by: Dmitriy Fingerman <[email protected]>
---
.../hadoop/hive/common/auth/HiveAuthUtils.java | 3 +-
.../java/org/apache/hive/common/IPStackUtils.java | 222 +++++++++++++++++++++
.../org/apache/hive/common/IPStackUtilsTest.java | 183 +++++++++++++++++
.../hadoop/hive/contrib/serde2/TestRegexSerDe.java | 8 +-
.../hadoop/hive/hbase/HBaseStorageHandler.java | 8 +-
.../hadoop/hive/hbase/TestHBaseStorageHandler.java | 3 +-
.../java/org/apache/hive/hcatalog/MiniCluster.java | 5 +-
.../org/apache/hive/hcatalog/templeton/Main.java | 3 +-
.../templeton/tool/TestTempletonUtils.java | 10 +-
.../org/apache/iceberg/mr/hive/TestHiveShell.java | 3 +-
.../hive/hcatalog/hbase/ManyMiniCluster.java | 3 +-
.../hive/minikdc/TestHs2HooksWithMiniKdc.java | 8 +-
.../metastore/security/TestHadoopAuthBridge23.java | 4 +-
.../org/apache/hadoop/hive/hooks/TestHs2Hooks.java | 8 +-
.../hive/metastore/TestReplChangeManager.java | 4 +-
.../hadoop/hive/ql/TestWarehouseDnsPath.java | 4 +-
.../thrift/TestThriftHttpCLIServiceFeatures.java | 7 +-
.../hadoop/hive/kafka/KafkaBrokerResource.java | 9 +-
.../helpers/LlapTaskUmbilicalServer.java | 7 +-
.../llap/daemon/services/impl/LlapWebServices.java | 3 +-
.../llap/shufflehandler/TestShuffleHandler.java | 9 +-
.../hive/llap/tezplugins/LlapTaskCommunicator.java | 3 +-
.../org/apache/hadoop/hive/ql/TestTxnCommands.java | 5 +-
.../hive/serde2/TestTCTLSeparatedProtocol.java | 7 +-
.../service/auth/saml/HiveSamlHttpServlet.java | 4 +-
.../hive/service/auth/saml/HiveSamlUtils.java | 9 +-
.../apache/hive/service/server/HiveServer2.java | 4 +-
.../cli/TestCLIServiceConnectionLimits.java | 55 +++--
.../cli/TestRetryingThriftCLIServiceClient.java | 5 +-
.../cli/operation/TestOperationLogManager.java | 3 +-
.../cli/session/TestSessionManagerMetrics.java | 15 +-
.../hive/service/server/TestHS2HttpServer.java | 13 +-
.../hive/metastore/TestHiveMetaStoreTimeout.java | 3 +-
33 files changed, 547 insertions(+), 93 deletions(-)
diff --git
a/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
b/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
index 68163afa42b..3e17cdd10fc 100644
--- a/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
@@ -33,6 +33,7 @@
import com.google.common.base.Splitter;
import com.google.common.collect.Sets;
+import org.apache.hive.common.IPStackUtils;
import org.apache.thrift.transport.TSSLTransportFactory;
import
org.apache.thrift.transport.TSSLTransportFactory.TSSLTransportParameters;
import org.apache.thrift.transport.TServerSocket;
@@ -142,7 +143,7 @@ private static TSocket getSSLSocketWithHttps(TSocket
tSSLSocket, int maxMessageS
throws TTransportException {
SSLSocket sslSocket = (SSLSocket) tSSLSocket.getSocket();
SSLParameters sslParams = sslSocket.getSSLParameters();
- if (sslSocket.getLocalAddress().getHostAddress().equals("127.0.0.1")) {
+ if
(IPStackUtils.isActiveStackLoopbackIP(sslSocket.getLocalAddress().getHostAddress()))
{
sslParams.setEndpointIdentificationAlgorithm(null);
} else {
sslParams.setEndpointIdentificationAlgorithm("HTTPS");
diff --git a/common/src/java/org/apache/hive/common/IPStackUtils.java
b/common/src/java/org/apache/hive/common/IPStackUtils.java
new file mode 100644
index 00000000000..7b24b8a0781
--- /dev/null
+++ b/common/src/java/org/apache/hive/common/IPStackUtils.java
@@ -0,0 +1,222 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.netty.util.NetUtil;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+public class IPStackUtils {
+
+ public static final String WILDCARD_ADDRESS_IPV4 = "0.0.0.0";
+ public static final String LOOPBACK_ADDRESS_IPV4 = "127.0.0.1";
+
+ public static final List<String> WILDCARD_ADDRESSES_IPV6 =
Collections.unmodifiableList(
+ Arrays.asList("::", "0:0:0:0:0:0:0:0"));
+ public static final List<String> LOOPBACK_ADDRESSES_IPV6 =
Collections.unmodifiableList(
+ Arrays.asList("::1", "0:0:0:0:0:0:0:1"));
+
+ private static boolean preferIPv4Stack = NetUtil.isIpV4StackPreferred();
+ private static boolean preferIPv6Addresses =
NetUtil.isIpV6AddressesPreferred();
+
+ private IPStackUtils() {
+ }
+
+ @VisibleForTesting
+ static void setPreferIPv4Stack(boolean preferIPv4Stack) {
+ IPStackUtils.preferIPv4Stack = preferIPv4Stack;
+ }
+
+ @VisibleForTesting
+ static void setPreferIPv6Addresses(boolean preferIPv6Addresses) {
+ IPStackUtils.preferIPv6Addresses = preferIPv6Addresses;
+ }
+
+ /**
+ * Get the IPv4 or IPv6 wildcard address for binding on all network
interfaces,
+ * depending on Java properties.
+ * @return the wildcard address
+ */
+ public static String resolveWildcardAddress() {
+ if (preferIPv4Stack) {
+ // IPv6 stack is completely disabled on Java side
+ return WILDCARD_ADDRESS_IPV4;
+ } else if (preferIPv6Addresses) {
+ // Dual stack is enabled, and IPv6 addresses are preferred
+ return WILDCARD_ADDRESSES_IPV6.get(0);
+ } else {
+ // Dual stack is enabled, and IPv6 addresses are not preferred
+ return WILDCARD_ADDRESS_IPV4;
+ }
+ }
+
+ /**
+ * Concatenates the IPv4 or IPv6 wildcard address, depending on the preferred stack, with the specified port.
+ * @param port the port to append
+ * @return the wildcard address and port string
+ */
+ public static String concatWildcardAddressPort(int port) {
+ return concatHostPort(resolveWildcardAddress(), port);
+ }
+
+ /**
+ * Adapts the provided wildcard address to the active IP stack. If the provided address is the IPv4 wildcard
+ * address and the active stack is IPv6, returns the IPv6 wildcard address, and vice versa. If the provided
+ * address is not a wildcard address, it is returned unchanged.
+ * @param hostname an IP address or hostname
+ * @return the adapted wildcard address, or the provided address unchanged
+ */
+ public static String adaptWildcardAddress(String hostname) {
+ if (WILDCARD_ADDRESS_IPV4.equals(hostname) ||
WILDCARD_ADDRESSES_IPV6.contains(hostname)) {
+ // The provided address is a wildcard address, return the wildcard
address for the active IP stack
+ return resolveWildcardAddress();
+ } else {
+ return hostname;
+ }
+ }
+
+ /**
+ * Adapts the provided loopback address to the active IP stack. If the provided address is the IPv4 loopback
+ * address and the active stack is IPv6, returns the IPv6 loopback address, and vice versa. If the provided
+ * address is not a loopback address, it is returned unchanged.
+ * @param hostname an IP address or hostname
+ * @return the adapted loopback address, or the provided address unchanged
+ */
+ public static String adaptLoopbackAddress(String hostname) {
+ if (LOOPBACK_ADDRESS_IPV4.equals(hostname) ||
LOOPBACK_ADDRESSES_IPV6.contains(hostname)) {
+ // The provided address is a loopback address, return the loopback
address for the active IP stack
+ return resolveLoopbackAddress();
+ } else {
+ return hostname;
+ }
+ }
+
+ /**
+ * Get the IPv4 or IPv6 loopback address depending on Java properties.
+ * @return the loopback address
+ */
+ public static String resolveLoopbackAddress() {
+ if (preferIPv4Stack) {
+ // IPv6 stack is completely disabled on Java side
+ return LOOPBACK_ADDRESS_IPV4;
+ } else if (preferIPv6Addresses) {
+ // Dual stack is enabled, and IPv6 addresses are preferred
+ return LOOPBACK_ADDRESSES_IPV6.get(0);
+ } else {
+ // Dual stack is enabled, and IPv6 addresses are not preferred
+ return LOOPBACK_ADDRESS_IPV4;
+ }
+ }
+
+ /**
+ * Check if the provided IP address is a loopback interface for the active
IP stack.
+ * @return boolean
+ */
+ public static boolean isActiveStackLoopbackIP(String ipAddress) {
+ if (preferIPv4Stack) {
+ // IPv6 stack is completely disabled on Java side
+ return LOOPBACK_ADDRESS_IPV4.equals(ipAddress);
+ } else if (preferIPv6Addresses) {
+ // Dual stack is enabled, and IPv6 addresses are preferred
+ return LOOPBACK_ADDRESSES_IPV6.contains(ipAddress);
+ } else {
+ // Dual stack is enabled, and IPv6 addresses are not preferred
+ return LOOPBACK_ADDRESS_IPV4.equals(ipAddress);
+ }
+ }
+
+ /**
+ * Concatenates the IPv4 or IPv6 loopback address, depending on the preferred stack, with the specified port.
+ * @param port the port to append
+ * @return the loopback address and port string
+ */
+ public static String concatLoopbackAddressPort(int port) {
+ return concatHostPort(resolveLoopbackAddress(), port);
+ }
+
+ /**
+ * Concatenates the host and port with a colon.
+ * If the host is an IPv6 address, it is enclosed in square brackets.
+ * @param host the host
+ * @param port the port
+ * @return the concatenated host and port
+ */
+ public static String concatHostPort(String host, int port) {
+ return formatIPAddressForURL(host) + ":" + port;
+ }
+
+ /**
+ * Prepares an IP address for use in a URL.
+ * <p>
+ * This method ensures that IPv6 addresses are enclosed in square brackets,
+ * as required by URL syntax. IPv4 addresses and hostnames remain unchanged.
+ * </p>
+ *
+ * @param ipAddress the IP address or hostname to format
+ * @return the formatted IP address for use in a URL
+ */
+ public static String formatIPAddressForURL(String ipAddress) {
+ if (ipAddress.contains(":") && !ipAddress.startsWith("[") &&
!ipAddress.endsWith("]")) {
+ // IPv6 address
+ return "[" + ipAddress + "]";
+ } else {
+ // IPv4 address or hostname
+ return ipAddress;
+ }
+ }
+
+ /**
+ * If the provided address is an IPv4 address, transforms it to its IPv6 (IPv4-mapped) form;
+ * otherwise returns the provided address unchanged.
+ * Used in some tests which use hardcoded IPv4 addresses that need to be IPv6 when the active stack is IPv6.
+ * @param ipv4 an IPv4 address
+ * @return the transformed IPv6 address, or the provided address unchanged if it is not a valid IPv4 address
+ */
+ public static String transformToIPv6(String ipv4) {
+ if (NetUtil.isValidIpV4Address(ipv4)) {
+ try {
+ return InetAddress.getByName("::ffff:" + ipv4).getHostAddress();
+ } catch (UnknownHostException e) {
+ throw new RuntimeException(e);
+ }
+ } else {
+ return ipv4;
+ }
+ }
+
+ /**
+ * Concatenates the host, transformed to IPv6, and the port with a colon.
+ * Used in some tests which use hardcoded IPv4 addresses and ports that need to be IPv6 when the active
+ * stack is IPv6.
+ * @param ipv4 an IPv4 address
+ * @param port the port
+ * @return the host transformed to IPv6, concatenated with the port
+ */
+ public static String transformToIPv6(String ipv4, int port) {
+ if (NetUtil.isValidIpV4Address(ipv4)) {
+ try {
+ return concatHostPort(InetAddress.getByName("::ffff:" +
ipv4).getHostAddress(), port);
+ } catch (UnknownHostException e) {
+ throw new RuntimeException(e);
+ }
+ } else {
+ return ipv4;
+ }
+ }
+}
diff --git a/common/src/test/org/apache/hive/common/IPStackUtilsTest.java
b/common/src/test/org/apache/hive/common/IPStackUtilsTest.java
new file mode 100644
index 00000000000..4c89c0e9069
--- /dev/null
+++ b/common/src/test/org/apache/hive/common/IPStackUtilsTest.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+class IPStackUtilsTest {
+
+ @Test
+ void testIPv4LoopbackWhenIPv4StackIsForced() {
+ IPStackUtils.setPreferIPv4Stack(true);
+ IPStackUtils.setPreferIPv6Addresses(false);
+
+ String loopback = IPStackUtils.resolveLoopbackAddress();
+ assertEquals(IPStackUtils.LOOPBACK_ADDRESS_IPV4, loopback);
+ }
+
+ @Test
+ void testIPv6LoopbackWhenIPv6IsPreferred() {
+ IPStackUtils.setPreferIPv4Stack(false);
+ IPStackUtils.setPreferIPv6Addresses(true);
+
+ String loopback = IPStackUtils.resolveLoopbackAddress();
+ assertEquals(IPStackUtils.LOOPBACK_ADDRESSES_IPV6.get(0), loopback);
+ }
+
+ @Test
+ void testIPv4LoopbackWhenIPv6IsNotPreferred() {
+ IPStackUtils.setPreferIPv4Stack(false);
+ IPStackUtils.setPreferIPv6Addresses(false);
+
+ String loopback = IPStackUtils.resolveLoopbackAddress();
+ assertEquals(IPStackUtils.LOOPBACK_ADDRESS_IPV4, loopback);
+ }
+
+ @Test
+ void testIPv4WildcardWhenIPv4StackIsForced() {
+ IPStackUtils.setPreferIPv4Stack(true);
+ IPStackUtils.setPreferIPv6Addresses(false);
+
+ String wildcard = IPStackUtils.resolveWildcardAddress();
+ assertEquals(IPStackUtils.WILDCARD_ADDRESS_IPV4, wildcard);
+ }
+
+ @Test
+ void testIPv6WildcardWhenIPv6IsPreferred() {
+ IPStackUtils.setPreferIPv4Stack(false);
+ IPStackUtils.setPreferIPv6Addresses(true);
+
+ String wildcard = IPStackUtils.resolveWildcardAddress();
+ assertEquals(IPStackUtils.WILDCARD_ADDRESSES_IPV6.get(0), wildcard);
+ }
+
+ @Test
+ void testIPv4WildcardWhenIPv6IsNotPreferred() {
+ IPStackUtils.setPreferIPv4Stack(false);
+ IPStackUtils.setPreferIPv6Addresses(false);
+
+ String wildcard = IPStackUtils.resolveWildcardAddress();
+ assertEquals(IPStackUtils.WILDCARD_ADDRESS_IPV4, wildcard);
+ }
+
+ @Test
+ void testConcatHostPortIPv4Host() {
+ assertEquals("192.168.1.1:8080",
IPStackUtils.concatHostPort("192.168.1.1", 8080));
+ }
+
+ @Test
+ void testConcatHostPortIPv6Host() {
+ assertEquals("[2001:db8::1]:8080",
IPStackUtils.concatHostPort("2001:db8::1", 8080));
+ }
+
+ @Test
+ void testConcatHostPortIPv6Loopback() {
+ assertEquals("[::1]:9090", IPStackUtils.concatHostPort("::1", 9090));
+ }
+
+ @Test
+ void testConcatHostPortHostname() {
+ assertEquals("example.com:443", IPStackUtils.concatHostPort("example.com",
443));
+ }
+
+ @Test
+ void testConcatHostPortLoobackIPv4() {
+ assertEquals("127.0.0.1:3306", IPStackUtils.concatHostPort("127.0.0.1",
3306));
+ }
+
+ @Test
+ void testConcatHostPortLoopbackIPv6() {
+ assertEquals("[::1]:3306", IPStackUtils.concatHostPort("::1", 3306));
+ }
+
+ @Test
+ void testWildcardWhenIPv4StackIsForcedAndIPv4WildcardProvided() {
+ IPStackUtils.setPreferIPv4Stack(true);
+ IPStackUtils.setPreferIPv6Addresses(false);
+
+ String result =
IPStackUtils.adaptWildcardAddress(IPStackUtils.WILDCARD_ADDRESS_IPV4);
+ assertEquals(IPStackUtils.WILDCARD_ADDRESS_IPV4, result);
+ }
+
+ @Test
+ void testWildcardWhenIPv4StackIsForcedAndIPv6WildcardProvided() {
+ IPStackUtils.setPreferIPv4Stack(true);
+ IPStackUtils.setPreferIPv6Addresses(false);
+
+ String result =
IPStackUtils.adaptWildcardAddress(IPStackUtils.WILDCARD_ADDRESSES_IPV6.get(0));
+ assertEquals(IPStackUtils.WILDCARD_ADDRESS_IPV4, result);
+ }
+
+
+ @Test
+ void testWildcardWhenIPv6IsPreferredAndIPv6WildcardProvided() {
+ IPStackUtils.setPreferIPv4Stack(false);
+ IPStackUtils.setPreferIPv6Addresses(true);
+
+ String result =
IPStackUtils.adaptWildcardAddress(IPStackUtils.WILDCARD_ADDRESSES_IPV6.get(0));
+ assertEquals(IPStackUtils.WILDCARD_ADDRESSES_IPV6.get(0), result);
+ }
+
+ @Test
+ void testWildcardWhenIPv6IsPreferredAndIPv4WildcardProvided() {
+ IPStackUtils.setPreferIPv4Stack(false);
+ IPStackUtils.setPreferIPv6Addresses(true);
+
+ String result =
IPStackUtils.adaptWildcardAddress(IPStackUtils.WILDCARD_ADDRESS_IPV4);
+ assertEquals(IPStackUtils.WILDCARD_ADDRESSES_IPV6.get(0), result);
+ }
+
+ @Test
+ void testWildcardWhenIPv6IsNotPreferredAndIPv4WildcardProvided() {
+ IPStackUtils.setPreferIPv4Stack(false);
+ IPStackUtils.setPreferIPv6Addresses(false);
+
+ String result =
IPStackUtils.adaptWildcardAddress(IPStackUtils.WILDCARD_ADDRESS_IPV4);
+ assertEquals(IPStackUtils.WILDCARD_ADDRESS_IPV4, result);
+ }
+
+ @Test
+ void testWildcardWhenIPv6IsNotPreferredAndIPv6WildcardProvided() {
+ IPStackUtils.setPreferIPv4Stack(false);
+ IPStackUtils.setPreferIPv6Addresses(false);
+
+ String result =
IPStackUtils.adaptWildcardAddress(IPStackUtils.WILDCARD_ADDRESSES_IPV6.get(0));
+ assertEquals(IPStackUtils.WILDCARD_ADDRESS_IPV4, result);
+ }
+
+ @Test
+ void testWildcardWhenNonWildcardIPv4AddressProvided() {
+ String result = IPStackUtils.adaptWildcardAddress("192.168.1.1");
+ assertEquals("192.168.1.1", result);
+ }
+
+ @Test
+ void testWildcardWhenNonWildcardIPv6AddressProvided() {
+ String result = IPStackUtils.adaptWildcardAddress("2001:db8::1");
+ assertEquals("2001:db8::1", result);
+ }
+
+ @Test
+ void testWildcardWhenHostnameIsProvided() {
+ String result = IPStackUtils.adaptWildcardAddress("example.com");
+ assertEquals("example.com", result);
+ }
+}
\ No newline at end of file
diff --git
a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
index 2518ef0e001..67da566b51b 100644
--- a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
+++ b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
@@ -28,6 +28,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.io.Text;
+import org.apache.hive.common.IPStackUtils;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
@@ -65,11 +66,12 @@ public void testRegexSerDe() throws Throwable {
"%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s");
// Data
- Text t = new Text(
- "127.0.0.1 - - [26/May/2009:00:00:00 +0000] "
+ Text t = new Text(String.format(
+ "%s - - [26/May/2009:00:00:00 +0000] "
+ "\"GET /someurl/?track=Blabla(Main) HTTP/1.1\" 200 5864 - "
+ "\"Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) "
- + "AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.65
Safari/525.19\"");
+ + "AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.65
Safari/525.19\"",
+ IPStackUtils.resolveLoopbackAddress()));
// Deserialize
Object row = serDe.deserialize(t);
diff --git
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
index 4e1a5da725c..dad8efa5705 100644
---
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
+++
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
@@ -75,6 +75,7 @@
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.IPStackUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -297,15 +298,16 @@ public void configureTableJobProperties(
public URI getURIForAuth(Table table) throws URISyntaxException {
Map<String, String> tableProperties =
HiveCustomStorageHandlerUtils.getTableProperties(table);
hbaseConf = getConf();
- String hbase_host = tableProperties.getOrDefault(HBASE_HOST_NAME,
- hbaseConf.get(HBASE_HOST_NAME));
+ String hbase_host =
IPStackUtils.adaptLoopbackAddress(tableProperties.getOrDefault(HBASE_HOST_NAME,
+ hbaseConf.get(HBASE_HOST_NAME)));
String hbase_port = tableProperties.getOrDefault(HBASE_CLIENT_PORT,
hbaseConf.get(HBASE_CLIENT_PORT));
String table_name =
encodeString(tableProperties.getOrDefault(HBaseSerDe.HBASE_TABLE_NAME,
null));
String column_family = encodeString(tableProperties.getOrDefault(
HBaseSerDe.HBASE_COLUMNS_MAPPING, null));
- String URIString = HBASE_PREFIX + "//" + hbase_host + ":" + hbase_port +
"/" + table_name;
+ String URIString = HBASE_PREFIX + "//" +
IPStackUtils.concatHostPort(hbase_host, Integer.parseInt(hbase_port)) +
+ "/" + table_name;
if (column_family != null) {
URIString += "/" + column_family;
}
diff --git
a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java
b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java
index 535d1386041..422c2c302d5 100644
---
a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java
+++
b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java
@@ -29,6 +29,7 @@
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.common.IPStackUtils;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
@@ -61,7 +62,7 @@ public void testGetUriForAuthEmptyTableDefaultHostPort()
throws URISyntaxExcepti
// written out. At the time this test was written, this was the current
// behavior, so I left this test as/is. Need to research if a null
// table can be provided here.
- Assert.assertEquals("hbase://127.0.0.1:2181/null", uri.toString());
+ Assert.assertEquals(String.format("hbase://%s/null",
IPStackUtils.concatLoopbackAddressPort(2181)), uri.toString());
}
@Test
diff --git
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
index 174fbae8efd..e23876062fa 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
@@ -34,6 +34,7 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hive.common.IPStackUtils;
/**
* This class builds a single instance of itself with the Singleton
@@ -78,8 +79,8 @@ private void setupMiniDfsAndMrClusters() {
// Write the necessary config info to hadoop-site.xml
m_conf = m_mr.createJobConf();
m_conf.setInt("mapred.submit.replication", 1);
- m_conf.set("dfs.datanode.address", "0.0.0.0:0");
- m_conf.set("dfs.datanode.http.address", "0.0.0.0:0");
+ m_conf.set("dfs.datanode.address",
IPStackUtils.concatWildcardAddressPort(0));
+ m_conf.set("dfs.datanode.http.address",
IPStackUtils.concatWildcardAddressPort(0));
m_conf.writeXml(new FileOutputStream(conf_file));
// Set the system properties needed by Pig
diff --git
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
index 04d99254e4e..86dc20c9e70 100644
---
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
+++
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
@@ -34,6 +34,7 @@
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import
org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.hive.common.IPStackUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang3.StringUtils;
@@ -85,7 +86,7 @@ public class Main {
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
public static final int DEFAULT_PORT = 8080;
- public static final String DEFAULT_HOST = "0.0.0.0";
+ public static final String DEFAULT_HOST =
IPStackUtils.resolveWildcardAddress();
public static final String DEFAULT_KEY_STORE_PATH = "";
public static final String DEFAULT_KEY_STORE_PASSWORD = "";
public static final String DEFAULT_SSL_PROTOCOL_BLACKLIST = "SSLv2,SSLv3";
diff --git
a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
index cf48221c81d..a79c5af9cad 100644
---
a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
+++
b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
@@ -20,13 +20,13 @@
import java.io.File;
import java.io.FileNotFoundException;
-import java.net.URISyntaxException;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.shims.HadoopShimsSecure;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.IPStackUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -300,9 +300,11 @@ public void testConstructingUserHomeDirectory() throws
Exception {
@Test
public void testPropertiesParsing() throws Exception {
- String[] props =
{"hive.metastore.uris=thrift://localhost:9933\\,thrift://127.0.0.1:9933",
- "hive.metastore.sasl.enabled=false",
- "hive.some.fake.path=C:\\foo\\bar.txt\\"};
+ String[] props = {
+
String.format("hive.metastore.uris=thrift://localhost:9933\\,thrift://%s",
IPStackUtils.concatLoopbackAddressPort(9933)),
+ "hive.metastore.sasl.enabled=false",
+ "hive.some.fake.path=C:\\foo\\bar.txt\\"
+ };
StringBuilder input = new StringBuilder();
for(String prop : props) {
if(input.length() > 0) {
diff --git
a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java
b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java
index 29af4552cf8..6e5c4722d75 100644
---
a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java
+++
b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java
@@ -28,6 +28,7 @@
import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
import
org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
@@ -122,7 +123,7 @@ private HiveSession openSession() {
Preconditions.checkState(started, "You have to start TestHiveShell first,
before opening a session.");
try {
SessionHandle sessionHandle = client.getSessionManager().openSession(
- CLIService.SERVER_VERSION, "", "", "127.0.0.1",
Collections.emptyMap());
+ CLIService.SERVER_VERSION, "", "",
IPStackUtils.resolveLoopbackAddress(), Collections.emptyMap());
return client.getSessionManager().getSession(sessionHandle);
} catch (Exception e) {
throw new RuntimeException("Unable to open new Hive session: ", e);
diff --git
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
index 8c28c63b62b..82ae630f56a 100644
---
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
+++
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
@@ -34,6 +34,7 @@
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.testutils.MiniZooKeeperCluster;
import java.io.File;
@@ -261,7 +262,7 @@ private void setupHBaseCluster() {
hbaseConf.set("hbase.rootdir", hbaseRoot);
hbaseConf.set("hbase.master", "local");
hbaseConf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, zookeeperPort);
- hbaseConf.set(HConstants.ZOOKEEPER_QUORUM, "127.0.0.1");
+ hbaseConf.set(HConstants.ZOOKEEPER_QUORUM,
IPStackUtils.resolveLoopbackAddress());
hbaseConf.setInt("hbase.master.port", findFreePort());
hbaseConf.setInt("hbase.master.info.port", -1);
hbaseConf.setInt("hbase.regionserver.port", findFreePort());
diff --git
a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
index 4165e787120..6ff547217ec 100644
---
a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
+++
b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
@@ -31,6 +31,7 @@
import org.apache.hadoop.hive.hooks.TestHs2Hooks.PostExecHook;
import org.apache.hadoop.hive.hooks.TestHs2Hooks.PreExecHook;
import org.apache.hadoop.hive.hooks.TestHs2Hooks.SemanticAnalysisHook;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.junit.After;
import org.junit.AfterClass;
@@ -122,14 +123,14 @@ public void testHookContexts() throws Throwable {
Assert.assertNotNull("userName is null", PostExecHook.userName);
Assert.assertNotNull("operation is null", PostExecHook.operation);
Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PostExecHook.userName);
- Assert.assertTrue(PostExecHook.ipAddress,
PostExecHook.ipAddress.contains("127.0.0.1"));
+
Assert.assertTrue(IPStackUtils.isActiveStackLoopbackIP(PostExecHook.ipAddress));
Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
Assert.assertNotNull("ipaddress is null", PreExecHook.ipAddress);
Assert.assertNotNull("userName is null", PreExecHook.userName);
Assert.assertNotNull("operation is null", PreExecHook.operation);
Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PreExecHook.userName);
- Assert.assertTrue(PreExecHook.ipAddress,
PreExecHook.ipAddress.contains("127.0.0.1"));
+
Assert.assertTrue(IPStackUtils.isActiveStackLoopbackIP(PreExecHook.ipAddress));
Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
error = SemanticAnalysisHook.preAnalyzeError;
@@ -149,8 +150,7 @@ public void testHookContexts() throws Throwable {
SemanticAnalysisHook.command);
Assert.assertNotNull("semantic hook context commandType is null",
SemanticAnalysisHook.commandType);
- Assert.assertTrue(SemanticAnalysisHook.ipAddress,
- SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
+
Assert.assertTrue(IPStackUtils.isActiveStackLoopbackIP(SemanticAnalysisHook.ipAddress));
Assert.assertEquals("show tables", SemanticAnalysisHook.command);
}
}
\ No newline at end of file
diff --git
a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java
b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java
index 98571ffe213..b2550005633 100644
---
a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java
+++
b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java
@@ -39,6 +39,7 @@
import
org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
import org.apache.hadoop.security.token.delegation.DelegationKey;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.IPStackUtils;
import org.apache.thrift.transport.TSaslServerTransport;
import org.apache.thrift.transport.TTransportException;
import org.junit.Assert;
@@ -129,7 +130,8 @@ private void configureSuperUserIPAddresses(Configuration
conf,
builder.append(ip);
builder.append(',');
}
- builder.append("127.0.1.1,");
+ builder.append(IPStackUtils.resolveLoopbackAddress());
+ builder.append(",");
builder.append(InetAddress.getLocalHost().getCanonicalHostName());
conf.setStrings(DefaultImpersonationProvider.getTestProvider().getProxySuperuserIpConfKey(superUserShortName),
builder.toString());
diff --git
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
index 90ded94f597..1c51823b7ce 100644
---
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
+++
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
@@ -38,6 +38,7 @@
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.jdbc.HiveConnection;
import org.apache.hive.service.server.HiveServer2;
import org.junit.AfterClass;
@@ -206,14 +207,14 @@ public void testHookContexts() throws Throwable {
Assert.assertNotNull(PostExecHook.ipAddress, "ipaddress is null");
Assert.assertNotNull(PostExecHook.userName, "userName is null");
Assert.assertNotNull(PostExecHook.operation , "operation is null");
-    Assert.assertTrue(PostExecHook.ipAddress, PostExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertTrue(IPStackUtils.isActiveStackLoopbackIP(PostExecHook.ipAddress));
Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
Assert.assertEquals(System.getProperty("user.name"), PreExecHook.userName);
Assert.assertNotNull("ipaddress is null", PreExecHook.ipAddress);
Assert.assertNotNull("userName is null", PreExecHook.userName);
Assert.assertNotNull("operation is null", PreExecHook.operation);
-    Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertTrue(IPStackUtils.isActiveStackLoopbackIP(PreExecHook.ipAddress));
Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
error = SemanticAnalysisHook.preAnalyzeError;
@@ -233,8 +234,7 @@ public void testHookContexts() throws Throwable {
SemanticAnalysisHook.command);
Assert.assertNotNull("semantic hook context commandType is null",
SemanticAnalysisHook.commandType);
-    Assert.assertTrue(SemanticAnalysisHook.ipAddress,
-        SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertTrue(IPStackUtils.isActiveStackLoopbackIP(SemanticAnalysisHook.ipAddress));
Assert.assertEquals("show tables", SemanticAnalysisHook.command);
stmt.close();
diff --git
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
index 78304634c4e..a3f5e9ab040 100644
---
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
+++
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
@@ -53,6 +53,7 @@
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.IPStackUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
@@ -575,7 +576,8 @@ private void configureSuperUserIPAddresses(Configuration
conf,
builder.append(ip);
builder.append(',');
}
- builder.append("127.0.1.1,");
+ builder.append(IPStackUtils.resolveLoopbackAddress());
+ builder.append(",");
builder.append(InetAddress.getLocalHost().getCanonicalHostName());
conf.setStrings(DefaultImpersonationProvider.getTestProvider().getProxySuperuserIpConfKey(superUserShortName),
builder.toString());
diff --git
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseDnsPath.java
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseDnsPath.java
index 413e5ae03ac..4c7b7b8969a 100644
---
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseDnsPath.java
+++
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseDnsPath.java
@@ -24,6 +24,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hive.common.IPStackUtils;
import org.junit.Test;
public class TestWarehouseDnsPath {
@@ -40,7 +41,8 @@ public void testDnsPathNullAuthority() throws Exception {
@Test
public void testDnsPathWithAuthority() throws Exception {
conf.set("fs.defaultFS", "hdfs://localhost");
-    assertEquals("hdfs://127.0.0.1/path/1", transformPath("hdfs://127.0.0.1/path/1"));
+    assertEquals(String.format("hdfs://%s/path/1", IPStackUtils.formatIPAddressForURL(IPStackUtils.resolveLoopbackAddress())),
+        transformPath(String.format("hdfs://%s/path/1", IPStackUtils.formatIPAddressForURL(IPStackUtils.resolveLoopbackAddress()))));
conf.set("fs.defaultFS", "s3a://bucket");
assertEquals("s3a://bucket/path/1", transformPath("s3a://bucket/path/1"));
}
diff --git
a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
index 08b784a641d..c146d058c68 100644
---
a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
+++
b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
@@ -44,6 +44,7 @@
import
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.jdbc.HttpBasicAuthInterceptor;
import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.rpc.thrift.TCLIService;
@@ -315,8 +316,10 @@ public HiveAuthorizer
createHiveAuthorizer(HiveMetastoreClientFactory metastoreC
*/
@Test
public void testForwardedHeaders() throws Exception {
- verifyForwardedHeaders(new ArrayList<String>(Arrays.asList("127.0.0.1",
"202.101.101.101")), "show tables");
- verifyForwardedHeaders(new
ArrayList<String>(Arrays.asList("202.101.101.101")), "fs -ls /");
+ verifyForwardedHeaders(new
ArrayList<String>(Arrays.asList(IPStackUtils.resolveLoopbackAddress(),
+ IPStackUtils.transformToIPv6("202.101.101.101"))), "show tables");
+ verifyForwardedHeaders(new ArrayList<String>(Arrays.asList(
+ IPStackUtils.transformToIPv6("202.101.101.101"))), "fs -ls /");
verifyForwardedHeaders(new ArrayList<String>(), "show databases");
}
diff --git
a/kafka-handler/src/test/org/apache/hadoop/hive/kafka/KafkaBrokerResource.java
b/kafka-handler/src/test/org/apache/hadoop/hive/kafka/KafkaBrokerResource.java
index ed97308a23e..642358bf337 100644
---
a/kafka-handler/src/test/org/apache/hadoop/hive/kafka/KafkaBrokerResource.java
+++
b/kafka-handler/src/test/org/apache/hadoop/hive/kafka/KafkaBrokerResource.java
@@ -25,6 +25,7 @@
import kafka.zk.AdminZkClient;
import kafka.zk.EmbeddedZookeeper;
import org.apache.commons.io.FileUtils;
+import org.apache.hive.common.IPStackUtils;
import org.apache.kafka.common.network.Mode;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.test.TestSslUtils;
@@ -47,9 +48,9 @@
class KafkaBrokerResource extends ExternalResource {
private static final Logger LOG =
LoggerFactory.getLogger(KafkaBrokerResource.class);
private static final String TOPIC = "TEST-CREATE_TOPIC";
-  static final String BROKER_IP_PORT = "127.0.0.1:9092";
-  static final String BROKER_SASL_PORT = "127.0.0.1:9093";
-  static final String BROKER_SASL_SSL_PORT = "127.0.0.1:9094";
+  static final String BROKER_IP_PORT = IPStackUtils.concatLoopbackAddressPort(9092);
+  static final String BROKER_SASL_PORT = IPStackUtils.concatLoopbackAddressPort(9093);
+  static final String BROKER_SASL_SSL_PORT = IPStackUtils.concatLoopbackAddressPort(9094);
private EmbeddedZookeeper zkServer;
private KafkaServer kafkaServer;
private AdminZkClient adminZkClient;
@@ -79,7 +80,7 @@ KafkaBrokerResource enableSASL(String principal, String
keytab) {
LOG.info("init embedded Zookeeper");
tmpLogDir = Files.createTempDirectory("kafka-log-dir-").toAbsolutePath();
zkServer = new EmbeddedZookeeper();
- String zkConnect = "127.0.0.1:" + zkServer.port();
+ String zkConnect = IPStackUtils.concatLoopbackAddressPort(zkServer.port());
LOG.info("init kafka broker");
Properties brokerProps = new Properties();
brokerProps.setProperty("zookeeper.connect", zkConnect);
diff --git
a/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
b/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
index 5bf615c7e66..afd5ab920c1 100644
---
a/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
+++
b/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
@@ -20,17 +20,13 @@
import java.net.BindException;
import java.net.InetSocketAddress;
import java.util.HashMap;
-import java.util.List;
import java.util.Map;
-import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.Validator.RangeValidator;
-import
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto;
import org.apache.hadoop.hive.llap.protocol.LlapTaskUmbilicalProtocol;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.Server;
@@ -39,6 +35,7 @@
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.Service;
import org.apache.hadoop.security.token.Token;
+import org.apache.hive.common.IPStackUtils;
import org.apache.tez.common.security.JobTokenIdentifier;
import org.apache.tez.common.security.JobTokenSecretManager;
import org.apache.tez.runtime.api.impl.TezEvent;
@@ -130,7 +127,7 @@ private void startServer(Configuration conf,
LlapTaskUmbilicalProtocol umbilical, int numHandlers, int port,
boolean isHadoopSecurityAuthorizationEnabled) throws IOException {
server = new RPC.Builder(conf).setProtocol(LlapTaskUmbilicalProtocol.class)
-        .setBindAddress("0.0.0.0").setPort(port).setInstance(umbilical)
+        .setBindAddress(IPStackUtils.resolveWildcardAddress()).setPort(port).setInstance(umbilical)
.setNumHandlers(numHandlers).setSecretManager(jobTokenSecretManager)
.build();
if (isHadoopSecurityAuthorizationEnabled) {
diff --git
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
index 0bb16366b52..fc8ea063678 100644
---
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
+++
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
@@ -41,6 +41,7 @@
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.common.util.HiveVersionInfo;
import org.apache.hive.http.HttpServer;
import org.slf4j.Logger;
@@ -80,7 +81,7 @@ public void serviceInit(Configuration conf) {
this.useSSL = HiveConf.getBoolVar(conf, ConfVars.LLAP_DAEMON_WEB_SSL);
this.useSPNEGO = HiveConf.getBoolVar(conf, ConfVars.LLAP_WEB_AUTO_AUTH);
- String bindAddress = "0.0.0.0";
+ String bindAddress = IPStackUtils.resolveWildcardAddress();
HttpServer.Builder builder =
new HttpServer.Builder("llap").setPort(this.port).setHost(bindAddress);
builder.setConf(new HiveConf(conf, HiveConf.class));
diff --git
a/llap-server/src/test/org/apache/hadoop/hive/llap/shufflehandler/TestShuffleHandler.java
b/llap-server/src/test/org/apache/hadoop/hive/llap/shufflehandler/TestShuffleHandler.java
index 7366273d97f..f53891d4f5a 100644
---
a/llap-server/src/test/org/apache/hadoop/hive/llap/shufflehandler/TestShuffleHandler.java
+++
b/llap-server/src/test/org/apache/hadoop/hive/llap/shufflehandler/TestShuffleHandler.java
@@ -32,6 +32,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.common.util.Retry;
import
org.apache.tez.runtime.library.common.shuffle.orderedgrouped.ShuffleHeader;
import org.junit.Assert;
@@ -179,8 +180,8 @@ protected void sendError(ChannelHandlerContext ctx, String
message,
shuffleHandler.start();
-    String shuffleBaseURL = "http://127.0.0.1:"
-        + conf.get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY);
+    String shuffleBaseURL = String.format("http://%s", IPStackUtils.concatLoopbackAddressPort(
+        conf.getInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, ShuffleHandler.DEFAULT_SHUFFLE_PORT)));
URL url = new URL(shuffleBaseURL +
"/mapOutput?job=job_12345_1&dag=1&reduce=1&"
+ "map=attempt_12345_1_m_1_0");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
@@ -238,8 +239,8 @@ public void testSocketKeepAlive() throws Exception {
try {
shuffleHandler.start();
-      String shuffleBaseURL = "http://127.0.0.1:"
-          + conf.get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY);
+      String shuffleBaseURL = String.format("http://%s", IPStackUtils.concatLoopbackAddressPort(
+          conf.getInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, ShuffleHandler.DEFAULT_SHUFFLE_PORT)));
URL url = new URL(shuffleBaseURL +
"/mapOutput?job=job_12345_1&dag=1&reduce=1&"
+ "map=attempt_12345_1_m_1_0");
conn = (HttpURLConnection) url.openConnection();
diff --git
a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java
b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java
index d850473ffb5..a90de995a83 100644
---
a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java
+++
b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java
@@ -83,6 +83,7 @@
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
+import org.apache.hive.common.IPStackUtils;
import org.apache.tez.common.TezTaskUmbilicalProtocol;
import org.apache.tez.common.TezUtils;
import org.apache.tez.common.security.JobTokenSecretManager;
@@ -328,7 +329,7 @@ private void startServerInternal(Configuration conf, int
umbilicalPort,
int numHandlers, JobTokenSecretManager jobTokenSecretManager,
boolean isHadoopSecurityAuthorizationEnabled) throws IOException {
server = new RPC.Builder(conf).setProtocol(LlapTaskUmbilicalProtocol.class)
-        .setBindAddress("0.0.0.0").setPort(umbilicalPort).setInstance(umbilical)
+        .setBindAddress(IPStackUtils.resolveWildcardAddress()).setPort(umbilicalPort).setInstance(umbilical)
.setNumHandlers(numHandlers).setSecretManager(jobTokenSecretManager)
.build();
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
index 9e21a19dbe4..266e0ef299e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
@@ -78,6 +78,7 @@
import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.txn.compactor.CompactorTestUtilities;
+import org.apache.hive.common.IPStackUtils;
import org.apache.thrift.TException;
import org.junit.Assert;
import org.junit.Ignore;
@@ -602,8 +603,8 @@ public void testDDLsAdvancingWriteIds() throws Exception {
validWriteIds = msClient.getValidWriteIds("default." +
tableName).toString();
Assert.assertEquals("default.alter_table:8:9223372036854775807::",
validWriteIds);
-    runStatementOnDriver(String.format("ALTER TABLE %s SET SKEWED LOCATION (1='hdfs://127.0.0.1:8020/abcd/1')",
-        tableName));
+    runStatementOnDriver(String.format("ALTER TABLE %s SET SKEWED LOCATION (1='hdfs://%s/abcd/1')",
+        tableName, IPStackUtils.concatLoopbackAddressPort(8020)));
validWriteIds = msClient.getValidWriteIds("default." +
tableName).toString();
Assert.assertEquals("default.alter_table:9:9223372036854775807::",
validWriteIds);
diff --git
a/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
b/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
index 9c87ecbe290..117590456dd 100644
---
a/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
+++
b/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
@@ -25,6 +25,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol;
+import org.apache.hive.common.IPStackUtils;
import org.apache.thrift.TConfiguration;
import org.apache.thrift.protocol.TField;
import org.apache.thrift.protocol.TList;
@@ -319,8 +320,8 @@ public void testQuotedWrites() throws Exception {
*/
@Test
public void test1ApacheLogFormat() throws Exception {
-    final String sample =
-        "127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache_pb.gif HTTP/1.0\" 200 2326";
+    final String sample = String.format(
+        "%s - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache_pb.gif HTTP/1.0\" 200 2326", IPStackUtils.resolveLoopbackAddress());
TMemoryBuffer trans = new TMemoryBuffer(4096);
trans.write(sample.getBytes(), 0, sample.getBytes().length);
@@ -345,7 +346,7 @@ public void test1ApacheLogFormat() throws Exception {
final String ip = prot.readString();
prot.readFieldEnd();
- assertEquals("127.0.0.1", ip);
+ assertTrue(IPStackUtils.isActiveStackLoopbackIP(ip));
// identd
prot.readFieldBegin();
diff --git
a/service/src/java/org/apache/hive/service/auth/saml/HiveSamlHttpServlet.java
b/service/src/java/org/apache/hive/service/auth/saml/HiveSamlHttpServlet.java
index 4527ed50d37..6bb39b90c08 100644
---
a/service/src/java/org/apache/hive/service/auth/saml/HiveSamlHttpServlet.java
+++
b/service/src/java/org/apache/hive/service/auth/saml/HiveSamlHttpServlet.java
@@ -64,7 +64,7 @@ protected void doPost(HttpServletRequest request,
HttpServletResponse response)
} else {
LOG.error("SAML response could not be validated", e);
}
-      generateFormData(response, HiveSamlUtils.getLoopBackAddress(port), null, false,
+      generateFormData(response, HiveSamlUtils.resolveLoopbackAddress(port), null, false,
"SAML assertion could not be validated. Check server logs for more
details.");
return;
}
@@ -72,7 +72,7 @@ protected void doPost(HttpServletRequest request,
HttpServletResponse response)
LOG.info(
"Successfully validated saml response for user {}. Forwarding the
token to port {}",
nameId, port);
- generateFormData(response, HiveSamlUtils.getLoopBackAddress(port),
+ generateFormData(response, HiveSamlUtils.resolveLoopbackAddress(port),
tokenGenerator.get(nameId, relayState), true, "");
}
diff --git
a/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java
b/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java
index 5a5700b6e4b..68f2fe2eedb 100644
--- a/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java
+++ b/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java
@@ -24,8 +24,7 @@
import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hive.service.ServiceUtils;
-import org.apache.hive.service.auth.HiveAuthConstants;
+import org.apache.hive.common.IPStackUtils;
public class HiveSamlUtils {
@@ -65,9 +64,9 @@ public static URI getCallBackUri(HiveConf conf) throws
Exception {
}
}
-  public static final String LOOP_BACK_INTERFACE = "127.0.0.1";
-  public static String getLoopBackAddress(int port) {
-    return String.format("http://%s:%s",LOOP_BACK_INTERFACE, port);
+  public static final String LOOP_BACK_INTERFACE = IPStackUtils.resolveLoopbackAddress();
+  public static String resolveLoopbackAddress(int port) {
+    return String.format("http://%s", IPStackUtils.concatHostPort(LOOP_BACK_INTERFACE, port));
}
/**
diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java
b/service/src/java/org/apache/hive/service/server/HiveServer2.java
index 34d43ae2c82..8c567c31616 100644
--- a/service/src/java/org/apache/hive/service/server/HiveServer2.java
+++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java
@@ -99,6 +99,7 @@
import org.apache.hadoop.hive.registry.impl.ZookeeperUtils;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.shims.Utils;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.common.util.HiveStringUtils;
import org.apache.hive.common.util.HiveVersionInfo;
import org.apache.hive.common.util.Ref;
@@ -386,7 +387,8 @@ public synchronized void init(HiveConf hiveConf) {
final String webHost;
try {
webUIPort = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT);
-      webHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_BIND_HOST);
+      webHost = IPStackUtils.adaptWildcardAddress(
+          hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_BIND_HOST));
// We disable web UI in tests unless the test is explicitly setting a
// unique web ui port so that we don't mess up ptests.
boolean uiDisabledInTest = hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST) &&
diff --git
a/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java
b/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java
index 6ce40ec68dc..b41cb503db6 100644
---
a/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java
+++
b/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java
@@ -20,6 +20,7 @@
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.service.cli.session.SessionManager;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -42,7 +43,8 @@ public void testNoLimit() throws HiveSQLException {
List<SessionHandle> sessionHandles = new ArrayList<>();
try {
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
} finally {
@@ -63,7 +65,8 @@ public void testIncrementAndDecrementConnectionsUser() throws
HiveSQLException {
try {
// open 5 connections
for (int i = 0; i < limit / 2; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
@@ -75,7 +78,8 @@ public void testIncrementAndDecrementConnectionsUser() throws
HiveSQLException {
// open till limit but not exceed
for (int i = 0; i < limit; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"ff", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"ff", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
} finally {
@@ -95,12 +99,14 @@ public void testInvalidUserName() throws HiveSQLException {
List<SessionHandle> sessionHandles = new ArrayList<>();
try {
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
null, "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
null, "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
} finally {
@@ -168,7 +174,8 @@ public void testConnectionLimitPerUser() throws
HiveSQLException {
List<SessionHandle> sessionHandles = new ArrayList<>();
try {
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
@@ -183,7 +190,8 @@ public void testConnectionLimitPerUser() throws
HiveSQLException {
@Test
public void testConnectionLimitPerIpAddress() throws HiveSQLException {
thrown.expect(HiveSQLException.class);
- thrown.expectMessage("Connection limit per ipaddress reached (ipaddress:
127.0.0.1 limit: 10)");
+ thrown.expectMessage(String.format("Connection limit per ipaddress reached
(ipaddress: %s limit: 10)",
+ IPStackUtils.resolveLoopbackAddress()));
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER,
0);
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS,
10);
@@ -192,7 +200,8 @@ public void testConnectionLimitPerIpAddress() throws
HiveSQLException {
List<SessionHandle> sessionHandles = new ArrayList<>();
try {
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
@@ -207,7 +216,8 @@ public void testConnectionLimitPerIpAddress() throws
HiveSQLException {
@Test
public void testConnectionLimitPerUserIpAddress() throws HiveSQLException {
thrown.expect(HiveSQLException.class);
- thrown.expectMessage("Connection limit per user:ipaddress reached
(user:ipaddress: foo:127.0.0.1 limit: 10)");
+ thrown.expectMessage(String.format("Connection limit per user:ipaddress
reached (user:ipaddress: foo:%s limit: 10)",
+ IPStackUtils.resolveLoopbackAddress()));
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER,
0);
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS,
0);
@@ -216,7 +226,8 @@ public void testConnectionLimitPerUserIpAddress() throws
HiveSQLException {
List<SessionHandle> sessionHandles = new ArrayList<>();
try {
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
@@ -240,7 +251,8 @@ public void testConnectionMultipleLimitsUserAndIP() throws
HiveSQLException {
List<SessionHandle> sessionHandles = new ArrayList<>();
try {
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
@@ -255,7 +267,8 @@ public void testConnectionMultipleLimitsUserAndIP() throws
HiveSQLException {
@Test
public void testConnectionMultipleLimitsIPAndUserIP() throws
HiveSQLException {
thrown.expect(HiveSQLException.class);
- thrown.expectMessage("Connection limit per ipaddress reached (ipaddress:
127.0.0.1 limit: 5)");
+ thrown.expectMessage(String.format("Connection limit per ipaddress reached
(ipaddress: %s limit: 5)",
+ IPStackUtils.resolveLoopbackAddress()));
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER,
0);
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS,
5);
@@ -264,7 +277,8 @@ public void testConnectionMultipleLimitsIPAndUserIP()
throws HiveSQLException {
List<SessionHandle> sessionHandles = new ArrayList<>();
try {
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
@@ -279,7 +293,8 @@ public void testConnectionMultipleLimitsIPAndUserIP()
throws HiveSQLException {
@Test
public void testConnectionMultipleLimitsUserIPAndUser() throws
HiveSQLException {
thrown.expect(HiveSQLException.class);
- thrown.expectMessage("Connection limit per user:ipaddress reached
(user:ipaddress: foo:127.0.0.1 limit: 10)");
+ thrown.expectMessage(String.format("Connection limit per user:ipaddress
reached (user:ipaddress: foo:%s limit: 10)",
+ IPStackUtils.resolveLoopbackAddress()));
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER,
15);
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS,
0);
@@ -288,7 +303,8 @@ public void testConnectionMultipleLimitsUserIPAndUser()
throws HiveSQLException
List<SessionHandle> sessionHandles = new ArrayList<>();
try {
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar", "127.0.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar",
+ IPStackUtils.resolveLoopbackAddress(), null);
sessionHandles.add(session);
}
@@ -303,17 +319,20 @@ public void testConnectionMultipleLimitsUserIPAndUser()
throws HiveSQLException
@Test
public void testConnectionForwardedIpAddresses() throws HiveSQLException {
thrown.expect(HiveSQLException.class);
- thrown.expectMessage("Connection limit per ipaddress reached (ipaddress:
194.167.0.3 limit: 10)");
+ thrown.expectMessage(String.format("Connection limit per ipaddress reached
(ipaddress: %s limit: 10)",
+ IPStackUtils.transformToIPv6("194.167.0.3")));
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER,
0);
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS,
10);
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS,
10);
CLIService service = getService(conf);
- SessionManager.setForwardedAddresses(Lists.newArrayList("194.167.0.3",
"194.167.0.2", "194.167.0.1"));
+
SessionManager.setForwardedAddresses(Lists.newArrayList(IPStackUtils.transformToIPv6("194.167.0.3"),
+ IPStackUtils.transformToIPv6("194.167.0.2"),
IPStackUtils.transformToIPv6("194.167.0.1")));
List<SessionHandle> sessionHandles = new ArrayList<>();
try {
for (int i = 0; i < limit + 1; i++) {
- SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar", "194.167.0.1", null);
+ SessionHandle session = service.openSession(CLIService.SERVER_VERSION,
"foo", "bar",
+ IPStackUtils.transformToIPv6("194.167.0.1"), null);
sessionHandles.add(session);
}
diff --git
a/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
b/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
index 53b9b636794..b0092ef4aa1 100644
---
a/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
+++
b/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
@@ -20,6 +20,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveServer2TransportMode;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.service.Service;
import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.cli.session.HiveSession;
@@ -116,7 +117,7 @@ public void testRetryBehaviour() throws Exception {
// Reset port setting
hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT, thriftPort);
- hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST,
"10.17.207.11");
+ hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST,
IPStackUtils.transformToIPv6("10.17.207.11"));
try {
RetryingThriftCLIServiceClientTest.newRetryingCLIServiceClient(hiveConf);
fail("Expected to throw exception for invalid host");
@@ -125,7 +126,7 @@ public void testRetryBehaviour() throws Exception {
assertTrue(sqlExc.getMessage().contains("3"));
}
// Reset host setting
- hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST,
"127.0.0.1");
+ hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST,
IPStackUtils.resolveLoopbackAddress());
// Create client
RetryingThriftCLIServiceClient.CLIServiceClientWrapper cliServiceClient
diff --git
a/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java
b/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java
index ddb65b997fd..ff29233287f 100644
---
a/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java
+++
b/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java
@@ -26,6 +26,7 @@
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.hive.common.IPStackUtils;
import org.junit.Before;
import org.junit.Test;
@@ -176,7 +177,7 @@ private byte[] writeBytes(File logFile, int maxBytes)
throws Exception {
private class FakeHiveSession extends HiveSessionImpl {
public FakeHiveSession(SessionHandle sessionHandle, HiveConf serverConf) {
super(sessionHandle, TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V11,
"dummy", "",
- serverConf, "0.0.0.0", null);
+ serverConf, IPStackUtils.resolveWildcardAddress(), null);
}
}
diff --git
a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
index 7f31dce576f..3b8005f076c 100644
---
a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
+++
b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
@@ -33,6 +33,7 @@
import org.apache.hadoop.hive.common.metrics.metrics2.MetricsReporting;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
@@ -207,13 +208,13 @@ public void testOpenSessionMetrics() throws Exception {
MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE,
MetricsConstant.HS2_OPEN_SESSIONS, 0);
SessionHandle handle =
- sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", "127.0.0.1",
+ sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", IPStackUtils.resolveLoopbackAddress(),
new HashMap<String, String>());
json = metrics.dumpJson();
MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE,
MetricsConstant.HS2_OPEN_SESSIONS, 1);
- sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", "127.0.0.1",
+ sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", IPStackUtils.resolveLoopbackAddress(),
new HashMap<String, String>());
json = metrics.dumpJson();
@@ -234,7 +235,7 @@ public void testOpenSessionTimeMetrics() throws Exception {
long firstSessionOpen = System.currentTimeMillis();
SessionHandle handle =
- sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", "127.0.0.1",
+ sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", IPStackUtils.resolveLoopbackAddress(),
new HashMap<String, String>());
json = metrics.dumpJson();
@@ -242,7 +243,7 @@ public void testOpenSessionTimeMetrics() throws Exception {
(double)(System.currentTimeMillis() - firstSessionOpen), 100d);
long secondSessionOpen = System.currentTimeMillis();
- sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", "127.0.0.1",
+ sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", IPStackUtils.resolveLoopbackAddress(),
new HashMap<String, String>());
json = metrics.dumpJson();
@@ -268,7 +269,7 @@ public void testActiveSessionMetrics() throws Exception {
MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE,
MetricsConstant.HS2_ACTIVE_SESSIONS, 0);
SessionHandle handle =
- sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", "127.0.0.1",
+ sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", IPStackUtils.resolveLoopbackAddress(),
new HashMap<String, String>());
final HiveSession session = sm.getSession(handle);
@@ -322,7 +323,7 @@ public void testActiveSessionTimeMetrics() throws Exception
{
MetricsConstant.HS2_AVG_ACTIVE_SESSION_TIME, "NaN");
SessionHandle handle =
- sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", "127.0.0.1",
+ sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", IPStackUtils.resolveLoopbackAddress(),
new HashMap<String, String>());
final HiveSession session = sm.getSession(handle);
@@ -376,7 +377,7 @@ public void testAbandonedSessionMetrics() throws Exception {
String json = metrics.dumpJson();
MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER,
MetricsConstant.HS2_ABANDONED_SESSIONS, "");
- sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", "127.0.0.1",
+ sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", IPStackUtils.resolveLoopbackAddress(),
new HashMap<String, String>());
// We're going to wait for the session to be abandoned.
diff --git
a/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java
b/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java
index de0a71a7209..c8d5bef0d7d 100644
--- a/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java
+++ b/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java
@@ -24,6 +24,7 @@
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.common.IPStackUtils;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.OperationHandle;
import org.apache.hive.service.cli.SessionHandle;
@@ -167,7 +168,7 @@ public void testApiServletHistoricalQueries() throws
Exception {
String historicalQueriesRoute = "/queries/historical";
final SessionHandle handle =
- sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", "127.0.0.1",
+ sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", IPStackUtils.resolveLoopbackAddress(),
new HashMap());
String queryString = "SET " +
HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
@@ -197,8 +198,8 @@ public void testApiServletActiveSessions() throws Exception
{
Assert.assertTrue("[]".equals(initNoSessionsResponse));
SessionHandle handle1 =
- sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw", "127.0.0.1",
- new HashMap());
+ sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user",
"passw",
+ IPStackUtils.resolveLoopbackAddress(), new HashMap());
String oneSessionResponse = readFromUrl(apiBaseURL + sessionsRoute);
@@ -208,13 +209,13 @@ public void testApiServletActiveSessions() throws
Exception {
JsonNode session = sessionNodes.get(0);
Assert.assertEquals(session.path("sessionId").asText(),
handle1.getSessionId().toString());
Assert.assertEquals(session.path("username").asText(), "user");
- Assert.assertEquals(session.path("ipAddress").asText(), "127.0.0.1");
+ Assert.assertEquals(session.path("ipAddress").asText(),
IPStackUtils.resolveLoopbackAddress());
Assert.assertEquals(session.path("operationCount").asInt(), 0);
Assert.assertTrue(session.path("activeTime").canConvertToInt());
Assert.assertTrue(session.path("idleTime").canConvertToInt());
- SessionHandle handle2 =
sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw",
"127.0.0.1",
- new HashMap());
+ SessionHandle handle2 =
sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9,
+ "user", "passw", IPStackUtils.resolveLoopbackAddress(), new HashMap());
String twoSessionsResponse = readFromUrl(apiBaseURL + sessionsRoute);
List<JsonNode> twoSessionsNodes = getListOfNodes(twoSessionsResponse);
diff --git
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
index d80f818c0e8..a23cc485463 100644
---
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
+++
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
@@ -32,6 +32,7 @@
import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.hive.common.IPStackUtils;
import org.apache.thrift.TException;
import org.apache.thrift.transport.TTransportException;
import org.junit.After;
@@ -174,7 +175,7 @@ public void testConnectionTimeout() throws Exception {
MetastoreConf.setTimeVar(newConf, ConfVars.CLIENT_CONNECTION_TIMEOUT, 1000,
TimeUnit.MILLISECONDS);
// fake host to mock connection time out
- MetastoreConf.setVar(newConf, ConfVars.THRIFT_URIS, "thrift://1.1.1.1:" +
port);
+ MetastoreConf.setVar(newConf, ConfVars.THRIFT_URIS, "thrift://" +
IPStackUtils.transformToIPv6("1.1.1.1", port));
MetastoreConf.setLongVar(newConf, ConfVars.THRIFT_CONNECTION_RETRIES, 1);
Future<Void> future = Executors.newSingleThreadExecutor().submit(() -> {