Repository: hive Updated Branches: refs/heads/master 130617443 -> 2f40ac3aa
HIVE-15025: Secure-Socket-Layer (SSL) support for HMS (Aihua Xu, reviewed by Chaoyu Tang) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2f40ac3a Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2f40ac3a Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2f40ac3a Branch: refs/heads/master Commit: 2f40ac3aa423af827c1978c9be977a7b2ce5e5c8 Parents: 1306174 Author: Aihua Xu <[email protected]> Authored: Fri Oct 21 13:58:08 2016 -0400 Committer: Aihua Xu <[email protected]> Committed: Thu Oct 27 10:01:38 2016 -0400 ---------------------------------------------------------------------- .../hadoop/hive/common/auth/HiveAuthUtils.java | 125 +++++++++++++++++++ .../org/apache/hadoop/hive/conf/HiveConf.java | 12 ++ .../test/java/org/apache/hive/jdbc/TestSSL.java | 66 ++++++++++ .../TestThriftHttpCLIServiceFeatures.java | 4 +- .../org/apache/hive/jdbc/HiveConnection.java | 7 +- .../hadoop/hive/metastore/HiveMetaStore.java | 40 +++++- .../hive/metastore/HiveMetaStoreClient.java | 43 ++++++- .../hive/metastore/TServerSocketKeepAlive.java | 4 +- .../hive/service/auth/HiveAuthFactory.java | 96 -------------- .../cli/thrift/ThriftBinaryCLIService.java | 5 +- 10 files changed, 286 insertions(+), 116 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java ---------------------------------------------------------------------- diff --git a/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java b/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java new file mode 100644 index 0000000..b4dac4b --- /dev/null +++ b/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java @@ -0,0 +1,125 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common.auth; + +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import javax.net.ssl.SSLParameters; +import javax.net.ssl.SSLServerSocket; +import javax.net.ssl.SSLSocket; + +import org.apache.thrift.transport.TSSLTransportFactory; +import org.apache.thrift.transport.TServerSocket; +import org.apache.thrift.transport.TSocket; +import org.apache.thrift.transport.TTransport; +import org.apache.thrift.transport.TTransportException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class helps in some aspects of authentication. It creates the proper Thrift classes for the + * given configuration as well as helps with authenticating requests. 
+ */ +public class HiveAuthUtils { + private static final Logger LOG = LoggerFactory.getLogger(HiveAuthUtils.class); + + public static TTransport getSocketTransport(String host, int port, int loginTimeout) { + return new TSocket(host, port, loginTimeout); + } + + public static TTransport getSSLSocket(String host, int port, int loginTimeout) + throws TTransportException { + // The underlying SSLSocket object is bound to host:port with the given SO_TIMEOUT + TSocket tSSLSocket = TSSLTransportFactory.getClientSocket(host, port, loginTimeout); + return getSSLSocketWithHttps(tSSLSocket); + } + + public static TTransport getSSLSocket(String host, int port, int loginTimeout, + String trustStorePath, String trustStorePassWord) throws TTransportException { + TSSLTransportFactory.TSSLTransportParameters params = + new TSSLTransportFactory.TSSLTransportParameters(); + params.setTrustStore(trustStorePath, trustStorePassWord); + params.requireClientAuth(true); + // The underlying SSLSocket object is bound to host:port with the given SO_TIMEOUT and + // SSLContext created with the given params + TSocket tSSLSocket = TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params); + return getSSLSocketWithHttps(tSSLSocket); + } + + // Using endpoint identification algorithm as HTTPS enables us to do + // CNAMEs/subjectAltName verification + private static TSocket getSSLSocketWithHttps(TSocket tSSLSocket) throws TTransportException { + SSLSocket sslSocket = (SSLSocket) tSSLSocket.getSocket(); + SSLParameters sslParams = sslSocket.getSSLParameters(); + sslParams.setEndpointIdentificationAlgorithm("HTTPS"); + sslSocket.setSSLParameters(sslParams); + return new TSocket(sslSocket); + } + + public static TServerSocket getServerSocket(String hiveHost, int portNum) + throws TTransportException { + InetSocketAddress serverAddress; + if (hiveHost == null || hiveHost.isEmpty()) { + // Wildcard bind + serverAddress = new InetSocketAddress(portNum); + } else { + serverAddress = new 
InetSocketAddress(hiveHost, portNum); + } + return new TServerSocket(serverAddress); + } + + public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath, + String keyStorePassWord, List<String> sslVersionBlacklist) throws TTransportException, + UnknownHostException { + TSSLTransportFactory.TSSLTransportParameters params = + new TSSLTransportFactory.TSSLTransportParameters(); + params.setKeyStore(keyStorePath, keyStorePassWord); + InetSocketAddress serverAddress; + if (hiveHost == null || hiveHost.isEmpty()) { + // Wildcard bind + serverAddress = new InetSocketAddress(portNum); + } else { + serverAddress = new InetSocketAddress(hiveHost, portNum); + } + TServerSocket thriftServerSocket = + TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params); + if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) { + List<String> sslVersionBlacklistLocal = new ArrayList<String>(); + for (String sslVersion : sslVersionBlacklist) { + sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase()); + } + SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket(); + List<String> enabledProtocols = new ArrayList<String>(); + for (String protocol : sslServerSocket.getEnabledProtocols()) { + if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) { + LOG.debug("Disabling SSL Protocol: " + protocol); + } else { + enabledProtocols.add(protocol); + } + } + sslServerSocket.setEnabledProtocols(enabledProtocols.toArray(new String[0])); + LOG.info("SSL Server Socket Enabled Protocols: " + + Arrays.toString(sslServerSocket.getEnabledProtocols())); + } + return thriftServerSocket; + } +} http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java ---------------------------------------------------------------------- diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 6f168b5..15de10b 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -904,6 +904,18 @@ public class HiveConf extends Configuration { METASTORE_INIT_METADATA_COUNT_ENABLED("hive.metastore.initial.metadata.count.enabled", true, "Enable a metadata count at metastore startup for metrics."), + // Metastore SSL settings + HIVE_METASTORE_USE_SSL("hive.metastore.use.SSL", false, + "Set this to true for using SSL encryption in HMS server."), + HIVE_METASTORE_SSL_KEYSTORE_PATH("hive.metastore.keystore.path", "", + "Metastore SSL certificate keystore location."), + HIVE_METASTORE_SSL_KEYSTORE_PASSWORD("hive.metastore.keystore.password", "", + "Metastore SSL certificate keystore password."), + HIVE_METASTORE_SSL_TRUSTSTORE_PATH("hive.metastore.truststore.path", "", + "Metastore SSL certificate truststore location."), + HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD("hive.metastore.truststore.password", "", + "Metastore SSL certificate truststore password."), + // Parameters for exporting metadata on table drop (requires the use of the) // org.apache.hadoop.hive.ql.parse.MetaDataExportListener preevent listener METADATA_EXPORT_LOCATION("hive.metadata.export.location", "", http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java ---------------------------------------------------------------------- diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java index 4036b53..0a53259 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java @@ -433,6 +433,60 @@ public class TestSSL { + LOCALHOST_KEY_STORE_NAME); } + /** + * Test HMS server with SSL + * @throws Exception + */ + @Test + public void 
testMetastoreWithSSL() throws Exception { + setMetastoreSslConf(conf); + setSslConfOverlay(confOverlay); + // Test in http mode + setHttpConfOverlay(confOverlay); + miniHS2 = new MiniHS2.Builder().withRemoteMetastore().withConf(conf).cleanupLocalDirOnStartup(false).build(); + miniHS2.start(confOverlay); + + String tableName = "sslTab"; + Path dataFilePath = new Path(dataFileDir, "kv1.txt"); + + // make SSL connection + hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS), + System.getProperty("user.name"), "bar"); + + // Set up test data + setupTestTableWithData(tableName, dataFilePath, hs2Conn); + Statement stmt = hs2Conn.createStatement(); + ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName); + int rowCount = 0; + while (res.next()) { + ++rowCount; + assertEquals("val_" + res.getInt(1), res.getString(2)); + } + // read result over SSL + assertEquals(500, rowCount); + + hs2Conn.close(); + } + + /** + * Verify the HS2 can't connect to HMS if the certificate doesn't match + * @throws Exception + */ + @Test + public void testMetastoreConnectionWrongCertCN() throws Exception { + setMetastoreSslConf(conf); + conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH, + dataFileDir + File.separator + EXAMPLEDOTCOM_KEY_STORE_NAME); + miniHS2 = new MiniHS2.Builder().withRemoteMetastore().withConf(conf).cleanupLocalDirOnStartup(false).build(); + try { + miniHS2.start(confOverlay); + } catch (java.net.ConnectException e) { + assertTrue(e.toString().contains("Connection refused")); + } + + miniHS2.stop(); + } + private void setupTestTableWithData(String tableName, Path dataFilePath, Connection hs2Conn) throws Exception { Statement stmt = hs2Conn.createStatement(); @@ -456,6 +510,18 @@ public class TestSSL { KEY_STORE_TRUST_STORE_PASSWORD); } + private void setMetastoreSslConf(HiveConf conf) { + conf.setBoolVar(ConfVars.HIVE_METASTORE_USE_SSL, true); + conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH, + dataFileDir + File.separator 
+ LOCALHOST_KEY_STORE_NAME); + conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD, + KEY_STORE_TRUST_STORE_PASSWORD); + conf.setVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH, + dataFileDir + File.separator + TRUST_STORE_NAME); + conf.setVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD, + KEY_STORE_TRUST_STORE_PASSWORD); + } + private void clearSslConfOverlay(Map<String, String> confOverlay) { confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "false"); } http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java ---------------------------------------------------------------------- diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java index f7163cf..1581f9b 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java @@ -31,6 +31,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.common.auth.HiveAuthUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; @@ -43,7 +44,6 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; import org.apache.hive.jdbc.HttpBasicAuthInterceptor; -import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes; import org.apache.hive.service.rpc.thrift.TCLIService; import 
org.apache.hive.service.rpc.thrift.TExecuteStatementReq; @@ -210,7 +210,7 @@ public class TestThriftHttpCLIServiceFeatures { } private TTransport getRawBinaryTransport() throws Exception { - return HiveAuthFactory.getSocketTransport(ThriftCLIServiceTest.host, ThriftCLIServiceTest.port, 0); + return HiveAuthUtils.getSocketTransport(ThriftCLIServiceTest.host, ThriftCLIServiceTest.port, 0); } private static TTransport getHttpTransport() throws Exception { http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java ---------------------------------------------------------------------- diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index ce85320..d6cf744 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -19,6 +19,7 @@ package org.apache.hive.jdbc; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.auth.HiveAuthUtils; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.KerberosSaslHelper; @@ -487,14 +488,14 @@ public class HiveConnection implements java.sql.Connection { JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD); if (sslTrustStore == null || sslTrustStore.isEmpty()) { - transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout); + transport = HiveAuthUtils.getSSLSocket(host, port, loginTimeout); } else { - transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout, + transport = HiveAuthUtils.getSSLSocket(host, port, loginTimeout, sslTrustStore, sslTrustStorePassword); } } else { // get non-SSL socket transport - transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout); + transport = HiveAuthUtils.getSocketTransport(host, port, loginTimeout); } return transport; } 
http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java ---------------------------------------------------------------------- diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 530d2f4..1ea94c2 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -36,7 +36,9 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.JvmPauseMonitor; import org.apache.hadoop.hive.common.LogUtils; +import org.apache.hadoop.hive.common.ServerUtils; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.auth.HiveAuthUtils; import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; @@ -111,7 +113,6 @@ import org.apache.thrift.server.TServerEventHandler; import org.apache.thrift.server.TThreadPoolServer; import org.apache.thrift.transport.TFramedTransport; import org.apache.thrift.transport.TServerSocket; -import org.apache.thrift.transport.TServerTransport; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportFactory; import org.slf4j.Logger; @@ -120,6 +121,9 @@ import org.slf4j.LoggerFactory; import javax.jdo.JDOException; import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.text.DateFormat; import java.text.SimpleDateFormat; @@ -6751,9 +6755,9 @@ public class HiveMetaStore extends ThriftHiveMetastore { boolean tcpKeepAlive = conf.getBoolVar(HiveConf.ConfVars.METASTORE_TCP_KEEP_ALIVE); 
boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT); boolean useCompactProtocol = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_COMPACT_PROTOCOL); + boolean useSSL = conf.getBoolVar(ConfVars.HIVE_METASTORE_USE_SSL); useSasl = conf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL); - TProcessor processor; TTransportFactory transFactory; final TProtocolFactory protocolFactory; @@ -6768,6 +6772,8 @@ public class HiveMetaStore extends ThriftHiveMetastore { HMSHandler baseHandler = new HiveMetaStore.HMSHandler("new db based metaserver", conf, false); IHMSHandler handler = newRetryingHMSHandler(baseHandler, conf); + TServerSocket serverSocket = null; + if (useSasl) { // we are in secure mode. if (useFramedTransport) { @@ -6785,6 +6791,8 @@ public class HiveMetaStore extends ThriftHiveMetastore { MetaStoreUtils.getMetaStoreSaslProperties(conf)); processor = saslServer.wrapProcessor( new ThriftHiveMetastore.Processor<IHMSHandler>(handler)); + serverSocket = HiveAuthUtils.getServerSocket(null, port); + LOG.info("Starting DB backed MetaStore Server in Secure Mode"); } else { // we are in unsecure mode. 
@@ -6802,12 +6810,32 @@ processor = new TSetIpAddressProcessor<IHMSHandler>(handler); LOG.info("Starting DB backed MetaStore Server"); } + + // enable SSL support for HMS + List<String> sslVersionBlacklist = new ArrayList<String>(); + for (String sslVersion : conf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")) { + sslVersionBlacklist.add(sslVersion); + } + if (!useSSL) { + serverSocket = HiveAuthUtils.getServerSocket(null, port); + } else { + String keyStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH).trim(); + if (keyStorePath.isEmpty()) { + throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH.varname + + " Not configured for SSL connection"); + } + String keyStorePassword = ShimLoader.getHadoopShims().getPassword(conf, + HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname); + serverSocket = HiveAuthUtils.getServerSSLSocket(null, port, keyStorePath, + keyStorePassword, sslVersionBlacklist); + } + } + + if (tcpKeepAlive) { + serverSocket = new TServerSocketKeepAlive(serverSocket); } - - TServerTransport serverTransport = tcpKeepAlive ? 
- new TServerSocketKeepAlive(port) : new TServerSocket(port); - TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverTransport) + TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverSocket) .processor(processor) .transportFactory(transFactory) .protocolFactory(protocolFactory) http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java ---------------------------------------------------------------------- diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index c32486f..b04b68a 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hive.metastore; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.common.ValidTxnList; +import org.apache.hadoop.hive.common.auth.HiveAuthUtils; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public; import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable; @@ -139,6 +140,7 @@ import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TCompactProtocol; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.transport.TFramedTransport; +import org.apache.thrift.transport.TSSLTransportFactory; import org.apache.thrift.transport.TSocket; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; @@ -425,6 +427,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient { private void open() throws MetaException { isConnected = false; TTransportException tte = null; + boolean useSSL = 
conf.getBoolVar(ConfVars.HIVE_METASTORE_USE_SSL); boolean useSasl = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL); boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT); boolean useCompactProtocol = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_COMPACT_PROTOCOL); @@ -434,8 +437,8 @@ public class HiveMetaStoreClient implements IMetaStoreClient { for (int attempt = 0; !isConnected && attempt < retries; ++attempt) { for (URI store : metastoreUris) { LOG.info("Trying to connect to metastore with URI " + store); + try { - transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout); if (useSasl) { // Wrap thrift connection with SASL for secure connection. try { @@ -450,6 +453,8 @@ public class HiveMetaStoreClient implements IMetaStoreClient { String tokenSig = conf.getVar(ConfVars.METASTORE_TOKEN_SIGNATURE); // tokenSig could be null tokenStrForm = Utils.getTokenStrForm(tokenSig); + transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout); + if(tokenStrForm != null) { // authenticate using delegation tokens via the "DIGEST" mechanism transport = authBridge.createClientTransport(null, store.getHost(), @@ -466,9 +471,35 @@ public class HiveMetaStoreClient implements IMetaStoreClient { LOG.error("Couldn't create client transport", ioe); throw new MetaException(ioe.toString()); } - } else if (useFramedTransport) { - transport = new TFramedTransport(transport); + } else { + if (useSSL) { + try { + String trustStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH).trim(); + if (trustStorePath.isEmpty()) { + throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH.varname + + " Not configured for SSL connection"); + } + String trustStorePassword = ShimLoader.getHadoopShims().getPassword(conf, + HiveConf.ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD.varname); + + // Create an SSL socket and connect + transport = HiveAuthUtils.getSSLSocket(store.getHost(), 
store.getPort(), clientSocketTimeout, trustStorePath, trustStorePassword ); + LOG.info("Opened an SSL connection to metastore, current connections: " + connCount.incrementAndGet()); + } catch(IOException e) { + throw new IllegalArgumentException(e); + } catch(TTransportException e) { + tte = e; + throw new MetaException(e.toString()); + } + } else { + transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout); + } + + if (useFramedTransport) { + transport = new TFramedTransport(transport); + } } + final TProtocol protocol; if (useCompactProtocol) { protocol = new TCompactProtocol(transport); @@ -477,8 +508,10 @@ public class HiveMetaStoreClient implements IMetaStoreClient { } client = new ThriftHiveMetastore.Client(protocol); try { - transport.open(); - LOG.info("Opened a connection to metastore, current connections: " + connCount.incrementAndGet()); + if (!transport.isOpen()) { + transport.open(); + LOG.info("Opened a connection to metastore, current connections: " + connCount.incrementAndGet()); + } isConnected = true; } catch (TTransportException e) { tte = e; http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/metastore/src/java/org/apache/hadoop/hive/metastore/TServerSocketKeepAlive.java ---------------------------------------------------------------------- diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/TServerSocketKeepAlive.java b/metastore/src/java/org/apache/hadoop/hive/metastore/TServerSocketKeepAlive.java index 9ac18dc..95bd76e 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/TServerSocketKeepAlive.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/TServerSocketKeepAlive.java @@ -30,8 +30,8 @@ import org.apache.thrift.transport.TTransportException; * */ public class TServerSocketKeepAlive extends TServerSocket { - public TServerSocketKeepAlive(int port) throws TTransportException { - super(port, 0); + public TServerSocketKeepAlive(TServerSocket serverSocket) throws 
TTransportException { + super(serverSocket.getServerSocket()); } @Override http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java index 168ba35..dcb6338 100644 --- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java +++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java @@ -20,26 +20,15 @@ package org.apache.hive.service.auth; import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION; import java.io.IOException; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; -import java.util.List; import java.util.Map; -import javax.net.ssl.SSLParameters; -import javax.net.ssl.SSLServerSocket; -import javax.net.ssl.SSLSocket; import javax.security.auth.login.LoginException; import javax.security.sasl.AuthenticationException; import javax.security.sasl.Sasl; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.metastore.IMetaStoreClient; -import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim; import org.apache.hadoop.hive.shims.ShimLoader; @@ -54,11 +43,7 @@ import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.thrift.ThriftCLIService; import org.apache.thrift.TProcessorFactory; -import org.apache.thrift.transport.TSSLTransportFactory; import org.apache.thrift.transport.TSaslServerTransport; -import 
org.apache.thrift.transport.TServerSocket; -import org.apache.thrift.transport.TSocket; -import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; import org.apache.thrift.transport.TTransportFactory; import org.slf4j.Logger; @@ -264,87 +249,6 @@ public class HiveAuthFactory { } } - public static TTransport getSocketTransport(String host, int port, int loginTimeout) { - return new TSocket(host, port, loginTimeout); - } - - public static TTransport getSSLSocket(String host, int port, int loginTimeout) - throws TTransportException { - // The underlying SSLSocket object is bound to host:port with the given SO_TIMEOUT - TSocket tSSLSocket = TSSLTransportFactory.getClientSocket(host, port, loginTimeout); - return getSSLSocketWithHttps(tSSLSocket); - } - - public static TTransport getSSLSocket(String host, int port, int loginTimeout, - String trustStorePath, String trustStorePassWord) throws TTransportException { - TSSLTransportFactory.TSSLTransportParameters params = - new TSSLTransportFactory.TSSLTransportParameters(); - params.setTrustStore(trustStorePath, trustStorePassWord); - params.requireClientAuth(true); - // The underlying SSLSocket object is bound to host:port with the given SO_TIMEOUT and - // SSLContext created with the given params - TSocket tSSLSocket = TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params); - return getSSLSocketWithHttps(tSSLSocket); - } - - // Using endpoint identification algorithm as HTTPS enables us to do - // CNAMEs/subjectAltName verification - private static TSocket getSSLSocketWithHttps(TSocket tSSLSocket) throws TTransportException { - SSLSocket sslSocket = (SSLSocket) tSSLSocket.getSocket(); - SSLParameters sslParams = sslSocket.getSSLParameters(); - sslParams.setEndpointIdentificationAlgorithm("HTTPS"); - sslSocket.setSSLParameters(sslParams); - return new TSocket(sslSocket); - } - - public static TServerSocket getServerSocket(String hiveHost, int portNum) - throws 
TTransportException { - InetSocketAddress serverAddress; - if (hiveHost == null || hiveHost.isEmpty()) { - // Wildcard bind - serverAddress = new InetSocketAddress(portNum); - } else { - serverAddress = new InetSocketAddress(hiveHost, portNum); - } - return new TServerSocket(serverAddress); - } - - public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath, - String keyStorePassWord, List<String> sslVersionBlacklist) throws TTransportException, - UnknownHostException { - TSSLTransportFactory.TSSLTransportParameters params = - new TSSLTransportFactory.TSSLTransportParameters(); - params.setKeyStore(keyStorePath, keyStorePassWord); - InetSocketAddress serverAddress; - if (hiveHost == null || hiveHost.isEmpty()) { - // Wildcard bind - serverAddress = new InetSocketAddress(portNum); - } else { - serverAddress = new InetSocketAddress(hiveHost, portNum); - } - TServerSocket thriftServerSocket = - TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params); - if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) { - List<String> sslVersionBlacklistLocal = new ArrayList<String>(); - for (String sslVersion : sslVersionBlacklist) { - sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase()); - } - SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket(); - List<String> enabledProtocols = new ArrayList<String>(); - for (String protocol : sslServerSocket.getEnabledProtocols()) { - if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) { - LOG.debug("Disabling SSL Protocol: " + protocol); - } else { - enabledProtocols.add(protocol); - } - } - sslServerSocket.setEnabledProtocols(enabledProtocols.toArray(new String[0])); - LOG.info("SSL Server Socket Enabled Protocols: " - + Arrays.toString(sslServerSocket.getEnabledProtocols())); - } - return thriftServerSocket; - } - // retrieve delegation token for the given user public String getDelegationToken(String 
owner, String renewer, String remoteAddr) throws HiveSQLException { http://git-wip-us.apache.org/repos/asf/hive/blob/2f40ac3a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java index d9c7b2e..94613d8 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java @@ -24,6 +24,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.SynchronousQueue; import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.common.auth.HiveAuthUtils; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -74,7 +75,7 @@ public class ThriftBinaryCLIService extends ThriftCLIService { sslVersionBlacklist.add(sslVersion); } if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) { - serverSocket = HiveAuthFactory.getServerSocket(hiveHost, portNum); + serverSocket = HiveAuthUtils.getServerSocket(hiveHost, portNum); } else { String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim(); if (keyStorePath.isEmpty()) { @@ -83,7 +84,7 @@ public class ThriftBinaryCLIService extends ThriftCLIService { } String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname); - serverSocket = HiveAuthFactory.getServerSSLSocket(hiveHost, portNum, keyStorePath, + serverSocket = HiveAuthUtils.getServerSSLSocket(hiveHost, portNum, keyStorePath, keyStorePassword, sslVersionBlacklist); }
